Dec 02 14:16:07 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 02 14:16:07 crc restorecon[4693]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:07 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 14:16:08 crc restorecon[4693]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc 
restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 14:16:08 crc 
restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc 
restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc 
restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 
crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 
14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 14:16:08 crc restorecon[4693]: 
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 14:16:08 crc restorecon[4693]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 02 14:16:08 crc kubenswrapper[4902]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.943509 4902 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946419 4902 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946436 4902 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946440 4902 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946445 4902 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946449 4902 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946452 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946457 4902 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946463 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946468 4902 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946472 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946477 4902 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946481 4902 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946485 4902 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946490 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946494 4902 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946497 4902 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946501 4902 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946506 4902 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946511 4902 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946515 4902 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946519 4902 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946523 4902 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946528 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946532 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946536 4902 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946540 4902 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946543 4902 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946547 4902 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946551 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946576 4902 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946579 4902 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946583 4902 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946587 4902 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946590 4902 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946594 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946597 4902 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946601 4902 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946604 4902 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946607 4902 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946611 4902 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946614 4902 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946617 4902 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946621 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946624 4902 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 14:16:08 crc 
kubenswrapper[4902]: W1202 14:16:08.946628 4902 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946631 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946635 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946639 4902 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946642 4902 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946646 4902 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946651 4902 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946655 4902 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946658 4902 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946663 4902 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946668 4902 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946671 4902 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946675 4902 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946678 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946682 4902 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946686 4902 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946689 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946693 4902 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946696 4902 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946700 4902 feature_gate.go:330] unrecognized feature gate: Example Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946704 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946713 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946717 4902 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946721 4902 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946724 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 14:16:08 crc 
kubenswrapper[4902]: W1202 14:16:08.946728 4902 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.946731 4902 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946962 4902 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946973 4902 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946982 4902 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946987 4902 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946992 4902 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.946997 4902 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947004 4902 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947009 4902 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947014 4902 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947018 4902 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947023 4902 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947027 4902 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947031 4902 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947035 4902 flags.go:64] FLAG: --cgroup-root="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947039 4902 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947044 4902 flags.go:64] FLAG: --client-ca-file="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947048 4902 flags.go:64] FLAG: --cloud-config="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947052 4902 flags.go:64] FLAG: --cloud-provider="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947056 4902 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947061 4902 flags.go:64] FLAG: --cluster-domain="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947065 4902 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947070 4902 flags.go:64] FLAG: --config-dir="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947074 4902 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947078 4902 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947083 4902 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947087 4902 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947092 4902 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 14:16:08 crc 
kubenswrapper[4902]: I1202 14:16:08.947096 4902 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947101 4902 flags.go:64] FLAG: --contention-profiling="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947105 4902 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947110 4902 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947114 4902 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947118 4902 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947123 4902 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947127 4902 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947131 4902 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947135 4902 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947139 4902 flags.go:64] FLAG: --enable-server="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947143 4902 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947149 4902 flags.go:64] FLAG: --event-burst="100" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947153 4902 flags.go:64] FLAG: --event-qps="50" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947158 4902 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947162 4902 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947166 4902 flags.go:64] FLAG: --eviction-hard="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947171 4902 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947175 4902 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947179 4902 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947183 4902 flags.go:64] FLAG: --eviction-soft="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947187 4902 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947191 4902 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947195 4902 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947199 4902 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947203 4902 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947207 4902 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947211 4902 flags.go:64] FLAG: --feature-gates="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947216 4902 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947220 4902 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: 
I1202 14:16:08.947224 4902 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947228 4902 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947232 4902 flags.go:64] FLAG: --healthz-port="10248" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947237 4902 flags.go:64] FLAG: --help="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947241 4902 flags.go:64] FLAG: --hostname-override="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947245 4902 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947249 4902 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947253 4902 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947257 4902 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947261 4902 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947265 4902 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947270 4902 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947273 4902 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947277 4902 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947283 4902 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947287 4902 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947291 4902 flags.go:64] FLAG: --kube-reserved="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947295 4902 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947299 4902 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947303 4902 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947307 4902 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947311 4902 flags.go:64] FLAG: --lock-file="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947315 4902 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947319 4902 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947323 4902 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947329 4902 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947334 4902 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947338 4902 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947342 4902 flags.go:64] FLAG: --logging-format="text" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947346 4902 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 
14:16:08.947350 4902 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947354 4902 flags.go:64] FLAG: --manifest-url="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947358 4902 flags.go:64] FLAG: --manifest-url-header="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947363 4902 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947367 4902 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947373 4902 flags.go:64] FLAG: --max-pods="110" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947377 4902 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947381 4902 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947385 4902 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947389 4902 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947393 4902 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947397 4902 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947401 4902 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947414 4902 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947419 4902 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947441 4902 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947448 4902 flags.go:64] FLAG: --pod-cidr="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947453 4902 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947458 4902 flags.go:64] FLAG: --pod-manifest-path="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947462 4902 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947466 4902 flags.go:64] FLAG: --pods-per-core="0" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947470 4902 flags.go:64] FLAG: --port="10250" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947475 4902 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947479 4902 flags.go:64] FLAG: --provider-id="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947483 4902 flags.go:64] FLAG: --qos-reserved="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947487 4902 flags.go:64] FLAG: --read-only-port="10255" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947491 4902 flags.go:64] FLAG: --register-node="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947495 4902 flags.go:64] FLAG: --register-schedulable="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947499 4902 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 
14:16:08.947506 4902 flags.go:64] FLAG: --registry-burst="10" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947510 4902 flags.go:64] FLAG: --registry-qps="5" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947513 4902 flags.go:64] FLAG: --reserved-cpus="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947518 4902 flags.go:64] FLAG: --reserved-memory="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947522 4902 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947527 4902 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947531 4902 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947535 4902 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947539 4902 flags.go:64] FLAG: --runonce="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947543 4902 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947547 4902 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947552 4902 flags.go:64] FLAG: --seccomp-default="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947573 4902 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947578 4902 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947582 4902 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947586 4902 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947590 4902 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947594 4902 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947598 4902 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947602 4902 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947606 4902 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947611 4902 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947616 4902 flags.go:64] FLAG: --system-cgroups="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947620 4902 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947627 4902 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947632 4902 flags.go:64] FLAG: --tls-cert-file="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947636 4902 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947645 4902 flags.go:64] FLAG: --tls-min-version="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947650 4902 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947654 4902 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947659 4902 flags.go:64] 
FLAG: --topology-manager-policy-options="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947663 4902 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947667 4902 flags.go:64] FLAG: --v="2" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947672 4902 flags.go:64] FLAG: --version="false" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947678 4902 flags.go:64] FLAG: --vmodule="" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947683 4902 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.947688 4902 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947790 4902 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947796 4902 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947799 4902 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947804 4902 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947808 4902 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947813 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947817 4902 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947822 4902 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947827 4902 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947830 4902 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947834 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947837 4902 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947841 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947845 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947848 4902 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947852 4902 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947856 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947861 4902 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947864 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947868 4902 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947872 4902 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947877 4902 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947881 4902 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947884 4902 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947888 4902 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947891 4902 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947895 4902 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947899 4902 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947902 4902 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947905 4902 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947909 4902 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947912 4902 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947916 4902 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947919 4902 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947923 4902 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947926 4902 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947931 4902 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947935 4902 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947939 4902 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947943 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947947 4902 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947950 4902 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947954 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947957 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947961 4902 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947964 4902 feature_gate.go:330] unrecognized feature gate: Example Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947968 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947972 4902 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947976 4902 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947981 4902 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947985 4902 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947989 4902 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947992 4902 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947996 4902 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.947999 4902 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948003 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948006 4902 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948010 4902 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948014 4902 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948017 4902 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948021 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948024 4902 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948028 4902 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948031 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948034 4902 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948038 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948041 4902 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948048 4902 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948051 4902 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948054 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.948058 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.948064 4902 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true 
VolumeAttributesClass:false]} Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.961387 4902 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.961498 4902 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961685 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961709 4902 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961721 4902 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961731 4902 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961742 4902 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961751 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961760 4902 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961768 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961776 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961785 4902 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961792 4902 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961800 4902 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961808 4902 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961816 4902 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961823 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961831 4902 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961839 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961850 4902 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961861 4902 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961872 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961883 4902 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961892 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961900 4902 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961909 4902 feature_gate.go:330] 
unrecognized feature gate: ExternalOIDC Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961920 4902 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961930 4902 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961939 4902 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961952 4902 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961963 4902 feature_gate.go:330] unrecognized feature gate: Example Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961973 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961984 4902 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.961993 4902 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962001 4902 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962009 4902 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962019 4902 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962029 4902 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962039 4902 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962050 4902 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962061 4902 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962071 4902 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962082 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962096 4902 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962112 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962126 4902 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962142 4902 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962155 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962165 4902 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962177 4902 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962189 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962202 4902 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962215 4902 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962229 4902 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962239 4902 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962248 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962256 4902 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962264 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962272 4902 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962280 4902 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962289 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962301 4902 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962313 4902 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962323 4902 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962334 4902 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962344 4902 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962354 4902 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962364 4902 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962371 4902 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962379 4902 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962387 4902 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962395 4902 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 
14:16:08.962403 4902 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.962417 4902 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962697 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962715 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962724 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962734 4902 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962744 4902 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962754 4902 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962765 4902 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962776 4902 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962787 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962797 4902 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962806 4902 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962814 4902 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962822 4902 feature_gate.go:330] unrecognized feature gate: Example Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962831 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962839 4902 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962847 4902 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962856 4902 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962866 4902 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962876 4902 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962890 4902 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962906 4902 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962917 4902 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962930 4902 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962941 4902 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962952 4902 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962962 4902 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962972 4902 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962983 4902 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.962995 4902 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963005 4902 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963013 4902 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963021 4902 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963032 4902 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963041 4902 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963050 4902 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963059 4902 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963067 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963078 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963086 4902 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963094 4902 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963102 4902 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963109 4902 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963117 4902 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963124 4902 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963136 4902 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963145 4902 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963154 4902 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963162 4902 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963170 4902 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963178 4902 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963186 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963194 4902 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963201 4902 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963209 4902 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963217 4902 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963225 4902 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963233 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963241 4902 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963248 4902 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963256 4902 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963263 4902 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963271 4902 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963279 4902 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963288 4902 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963297 4902 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963306 4902 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963314 4902 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963322 4902 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963332 4902 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963342 4902 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 02 14:16:08 crc kubenswrapper[4902]: W1202 14:16:08.963351 4902 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.963365 4902 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.964188 4902 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.969641 4902 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.970051 4902 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.970917 4902 server.go:997] "Starting client certificate rotation"
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.970966 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.971210 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-10 00:48:26.937339405 +0000 UTC
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.971339 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.978287 4902 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.981066 4902 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 14:16:08 crc kubenswrapper[4902]: E1202 14:16:08.981435 4902 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError"
Dec 02 14:16:08 crc kubenswrapper[4902]: I1202 14:16:08.990803 4902 log.go:25] "Validated CRI v1 runtime API"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.015552 4902 log.go:25] "Validated CRI v1 image API"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.017945 4902 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.020592 4902 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-14-10-59-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.020634 4902 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.040810 4902 manager.go:217] Machine: {Timestamp:2025-12-02 14:16:09.039289163 +0000 UTC m=+0.230597912 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:acc9633f-b885-4291-bd1d-c1f58994796b BootID:6bdcc9b0-fdff-4d27-a2f4-57188241bda3 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e9:5f:64 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e9:5f:64 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:73:17:b2 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:92:95:94 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b7:dc:70 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:66:ff:72 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:92:37:f2:92:c4:ec Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:2e:be:1f:ea:85 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.041079 4902 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.041254 4902 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.041861 4902 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042112 4902 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042156 4902 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042393 4902 topology_manager.go:138] "Creating topology manager with none policy"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042406 4902 container_manager_linux.go:303] "Creating device plugin manager"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042656 4902 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.042693 4902 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.043133 4902 state_mem.go:36] "Initialized new in-memory state store"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.043388 4902 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.044263 4902 kubelet.go:418] "Attempting to sync node with API server"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.044299 4902 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.044325 4902 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.044341 4902 kubelet.go:324] "Adding apiserver pod source"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.044355 4902 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.046284 4902 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.046682 4902 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.046798 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused
Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.046762 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.046863 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError"
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.046901 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.047650 4902 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048297 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048331 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048343 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048354 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048369 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048379 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048388 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048403 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048413 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048425 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048457 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048469 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.048737 4902 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.049274 4902 server.go:1280] "Started kubelet"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.049971 4902 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.049850 4902 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.050522 4902 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.049862 4902 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 02 14:16:09 crc systemd[1]: Started Kubernetes Kubelet.
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.052186 4902 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.251:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d6ba130a694c5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 14:16:09.049232581 +0000 UTC m=+0.240541300,LastTimestamp:2025-12-02 14:16:09.049232581 +0000 UTC m=+0.240541300,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.052803 4902 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.052840 4902 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.053183 4902 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.053200 4902 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.053184 4902 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 05:18:26.859801137 +0000 UTC
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.053367 4902 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.053660 4902 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.056032 4902 factory.go:55] Registering systemd factory
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.056207 4902 factory.go:221] Registration of the systemd container factory successfully
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.056446 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="200ms"
Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.056476 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused
Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.056556 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.056857 4902 server.go:460] "Adding debug handlers to kubelet server"
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061092 4902 factory.go:153] Registering CRI-O factory
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061123 4902 factory.go:221] Registration of the crio container factory successfully
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061241 4902 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061277 4902 factory.go:103] Registering Raw factory
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061292 4902 manager.go:1196] Started watching for new ooms in manager
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.061859 4902 manager.go:319] Starting recovery of all containers
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070304 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070362 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070391 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070407 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070422 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070436 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070450 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070476 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070493 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070509 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070522 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070586 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070602 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070621 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070638 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070651 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070666 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070680 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070694 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070711 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070734 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070750 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070770 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070785 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070801 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070815 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070834 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070846 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070869 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070893 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070937 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070953 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070965 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070977 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.070988 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071000 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071012 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071023 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071038 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071050 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071061 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071072 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071085 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071095 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071107 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071120 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071132 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071144 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071157 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071170 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071182 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071193 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071208 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071221 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071236 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071305 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071320 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071332 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071344 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071357 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071370 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071382 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071394 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071406 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071419 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071431 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071444 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071455 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071466 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071478 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071493 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071506 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071519 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071543 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071590 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071609 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071625 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071642 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071657 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071674 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071700 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071716 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071733 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071748 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071763 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071777 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071796 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071812 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071827 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071842 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071856 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071871 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071887 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071901 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071917 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071934 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071949 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071965 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071978 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.071990 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072006 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072021 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072037 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072055 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072077 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072095 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072113 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072130 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072147 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072165 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072182 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072200 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072217 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072232 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072248 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072265 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072280 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072296 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072312 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072331 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072346 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072372 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072390 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072405 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072421 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072436 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072451 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072466 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072482 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072496 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072512 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072526 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072538 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072556 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072604 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072617 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072630 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072642 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072653 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072666 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072678 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136"
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072690 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072703 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072715 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072726 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072737 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072748 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072759 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072771 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072783 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072850 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072864 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072876 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072888 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072899 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072921 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072945 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072961 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072977 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.072993 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073009 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073026 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073042 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073059 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073076 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073093 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073108 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073124 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073139 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073154 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073168 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073185 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073199 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073214 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073231 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073246 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073261 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073276 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073292 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.073308 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076449 4902 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076506 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076582 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076599 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076612 4902 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076626 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076639 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076651 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076666 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076681 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076694 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076708 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076722 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076737 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076751 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076764 4902 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076778 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076793 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076811 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076826 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076842 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076858 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076873 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076887 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076902 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076918 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076932 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076946 4902 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076959 4902 reconstruct.go:97] "Volume reconstruction finished" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.076968 4902 reconciler.go:26] "Reconciler: start to sync state" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.088549 4902 manager.go:324] Recovery completed Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.100124 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.101762 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.101893 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.102008 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.102843 4902 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.102957 4902 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.103031 4902 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.103072 4902 state_mem.go:36] "Initialized new in-memory state store" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.105246 4902 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.105292 4902 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.105324 4902 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.105439 4902 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.105948 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.106003 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.111589 4902 policy_none.go:49] "None policy: Start" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.112594 4902 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.112626 4902 state_mem.go:35] "Initializing new in-memory state store" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.154707 4902 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.161461 4902 manager.go:334] "Starting Device Plugin manager" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.161503 4902 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.161515 4902 server.go:79] "Starting device plugin registration server" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.161979 4902 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.161997 4902 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.162226 4902 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.162348 4902 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.162361 4902 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.172381 4902 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.205667 4902 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 14:16:09 crc kubenswrapper[4902]: 
I1202 14:16:09.205789 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.206995 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.207044 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.207057 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.207242 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.207499 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.207549 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208201 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208242 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208253 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208380 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208427 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208446 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208453 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208519 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.208556 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209114 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209135 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209143 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209255 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209390 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209424 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209451 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209466 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209476 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209747 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209764 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209772 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209845 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209889 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.209921 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210315 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210338 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210348 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210436 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210474 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210509 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210535 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210545 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210978 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.210997 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.211008 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.211020 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.211039 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.211048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.257058 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="400ms" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.262233 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.263798 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.263823 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.263833 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.263862 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.264296 4902 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278825 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278873 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278899 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278921 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278943 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.278967 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279010 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279051 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279084 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279148 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 
14:16:09.279205 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279254 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279301 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279339 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.279381 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.380929 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381002 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381034 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381063 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381095 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381125 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381154 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381180 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381187 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381259 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381278 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381305 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381347 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381231 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381317 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381230 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381211 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381400 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381463 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381430 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381598 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381635 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381665 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381692 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") 
" pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381719 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381727 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381737 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381782 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381800 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.381922 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.464637 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.466138 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.466192 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.466211 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.466242 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.467060 4902 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.541639 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.552755 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.569414 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.574264 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-035c1ef76accfaece3460b519d09834cda58279145f3b0bd387995ed42c20517 WatchSource:0}: Error finding container 035c1ef76accfaece3460b519d09834cda58279145f3b0bd387995ed42c20517: Status 404 returned error can't find the container with id 035c1ef76accfaece3460b519d09834cda58279145f3b0bd387995ed42c20517 Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.578521 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-f979e693c9fa35322c22af6e0c278f74120c4b0a4739da8689740cbaf944b2d2 WatchSource:0}: Error finding container f979e693c9fa35322c22af6e0c278f74120c4b0a4739da8689740cbaf944b2d2: Status 404 returned error can't find the container with id f979e693c9fa35322c22af6e0c278f74120c4b0a4739da8689740cbaf944b2d2 Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.587088 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.594167 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.603157 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-95609b08a0d57f9c4a6233624bd797bb654a2129c3e2944a86398ecf9655e6db WatchSource:0}: Error finding container 95609b08a0d57f9c4a6233624bd797bb654a2129c3e2944a86398ecf9655e6db: Status 404 returned error can't find the container with id 95609b08a0d57f9c4a6233624bd797bb654a2129c3e2944a86398ecf9655e6db Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.617729 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-2b62530d15777b924915d7667cfc491cfa25e32f0687a6a8b7a2a9d8774eec49 WatchSource:0}: Error finding container 2b62530d15777b924915d7667cfc491cfa25e32f0687a6a8b7a2a9d8774eec49: Status 404 returned error can't find the container with id 2b62530d15777b924915d7667cfc491cfa25e32f0687a6a8b7a2a9d8774eec49 Dec 02 14:16:09 crc kubenswrapper[4902]: W1202 14:16:09.621036 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-9ee2a6dfa377dbe41cb995b7353ed75fe4d26eb2b199b34a804c1f0743484d7d WatchSource:0}: Error finding container 9ee2a6dfa377dbe41cb995b7353ed75fe4d26eb2b199b34a804c1f0743484d7d: Status 404 returned error can't find the container with id 9ee2a6dfa377dbe41cb995b7353ed75fe4d26eb2b199b34a804c1f0743484d7d Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.633059 4902 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 
38.102.83.251:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d6ba130a694c5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 14:16:09.049232581 +0000 UTC m=+0.240541300,LastTimestamp:2025-12-02 14:16:09.049232581 +0000 UTC m=+0.240541300,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.658313 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="800ms" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.867409 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.868727 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.868770 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.868783 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:09 crc kubenswrapper[4902]: I1202 14:16:09.868813 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:09 crc kubenswrapper[4902]: E1202 14:16:09.869230 4902 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.251:6443: connect: connection refused" node="crc" Dec 02 14:16:10 crc kubenswrapper[4902]: W1202 14:16:10.006974 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:10 crc kubenswrapper[4902]: E1202 14:16:10.007069 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Dec 02 14:16:10 crc kubenswrapper[4902]: W1202 14:16:10.024731 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:10 crc kubenswrapper[4902]: E1202 14:16:10.024790 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Dec 02 14:16:10 crc kubenswrapper[4902]: 
I1202 14:16:10.050971 4902 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.054048 4902 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 06:19:12.305350404 +0000 UTC Dec 02 14:16:10 crc kubenswrapper[4902]: W1202 14:16:10.061667 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:10 crc kubenswrapper[4902]: E1202 14:16:10.061744 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.111936 4902 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25" exitCode=0 Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.112020 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.112098 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f979e693c9fa35322c22af6e0c278f74120c4b0a4739da8689740cbaf944b2d2"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.112167 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.113542 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.113589 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.113600 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.114534 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1" exitCode=0 Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.114596 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.114611 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9ee2a6dfa377dbe41cb995b7353ed75fe4d26eb2b199b34a804c1f0743484d7d"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.114664 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.115506 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.115531 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.115539 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.116832 4902 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198" exitCode=0 Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.116926 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.117091 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2b62530d15777b924915d7667cfc491cfa25e32f0687a6a8b7a2a9d8774eec49"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.117197 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.118430 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.118531 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"95609b08a0d57f9c4a6233624bd797bb654a2129c3e2944a86398ecf9655e6db"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.118458 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.118999 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.119016 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.119026 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.119355 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.119423 4902 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.119447 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.121885 4902 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb" exitCode=0 Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.121937 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.121967 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"035c1ef76accfaece3460b519d09834cda58279145f3b0bd387995ed42c20517"} Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.122112 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.123201 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.123225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.123234 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc kubenswrapper[4902]: W1202 14:16:10.413549 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.251:6443: connect: connection refused Dec 02 14:16:10 crc kubenswrapper[4902]: E1202 14:16:10.413645 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.251:6443: connect: connection refused" logger="UnhandledError" Dec 02 14:16:10 crc kubenswrapper[4902]: E1202 14:16:10.459464 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="1.6s" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.669357 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.670341 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.670384 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:10 crc kubenswrapper[4902]: I1202 14:16:10.670396 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:10 crc 
kubenswrapper[4902]: I1202 14:16:10.670426 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.054629 4902 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 01:48:35.803182832 +0000 UTC Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.054712 4902 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1067h32m24.748475435s for next certificate rotation Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.105069 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.129475 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.129539 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.129604 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.129763 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.131345 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.131404 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.131421 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.133159 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.133220 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.133239 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.133269 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46"} Dec 02 14:16:11 
crc kubenswrapper[4902]: I1202 14:16:11.134672 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.134737 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.134761 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.136078 4902 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5" exitCode=0 Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.136184 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.136341 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.137636 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.137678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.137697 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.138743 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.138901 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.140406 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.140472 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.140497 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144288 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144325 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144339 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144353 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144365 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b"} Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.144444 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.145420 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.145471 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.145489 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:11 crc kubenswrapper[4902]: I1202 14:16:11.753033 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149528 4902 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e" exitCode=0 Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149620 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e"} Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149675 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149724 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149737 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149774 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149808 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.149781 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151502 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:12 crc 
kubenswrapper[4902]: I1202 14:16:12.151524 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151488 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151546 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151556 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151588 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151537 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151859 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151884 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:12 crc kubenswrapper[4902]: I1202 14:16:12.151897 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.155973 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8"} Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.156014 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59"} Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.156029 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c"} Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.156050 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.157014 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.157074 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.157092 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.853146 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.853443 4902 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.855029 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.855083 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:13 crc kubenswrapper[4902]: I1202 14:16:13.855100 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.166842 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494"} Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.166914 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f"} Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.167024 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.168387 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.168447 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:14 crc kubenswrapper[4902]: I1202 14:16:14.168466 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.154018 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.154399 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.156003 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.156057 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.156073 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.169543 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.170788 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.170866 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.170889 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.977412 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.977768 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.979292 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.979372 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:15 crc kubenswrapper[4902]: I1202 14:16:15.979391 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.414664 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.414906 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.416409 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.416477 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.416498 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.421854 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:17 crc kubenswrapper[4902]: I1202 14:16:17.760184 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.178357 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.179623 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.179671 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.179683 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.856434 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.856765 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.858228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.858306 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:18 crc kubenswrapper[4902]: I1202 14:16:18.858343 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Dec 02 14:16:19 crc kubenswrapper[4902]: E1202 14:16:19.172521 4902 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.180386 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.181615 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.181686 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.181720 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.186941 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:19 crc kubenswrapper[4902]: I1202 14:16:19.704833 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.182989 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.184299 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.184365 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.184387 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:20 crc kubenswrapper[4902]: E1202 14:16:20.672112 4902 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.760908 4902 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 14:16:20 crc kubenswrapper[4902]: I1202 14:16:20.761012 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.052080 4902 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 02 14:16:21 crc kubenswrapper[4902]: E1202 14:16:21.107217 4902 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed 
certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.189187 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.190616 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.190657 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.190675 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.365585 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.365778 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.367983 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.368042 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.368053 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:21 crc kubenswrapper[4902]: W1202 14:16:21.741324 4902 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.741433 4902 trace.go:236] Trace[762520643]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 14:16:11.739) (total time: 10001ms): Dec 02 14:16:21 crc kubenswrapper[4902]: Trace[762520643]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:16:21.741) Dec 02 14:16:21 crc kubenswrapper[4902]: Trace[762520643]: [10.001655264s] [10.001655264s] END Dec 02 14:16:21 crc kubenswrapper[4902]: E1202 14:16:21.741461 4902 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.754018 4902 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 14:16:21 crc kubenswrapper[4902]: I1202 14:16:21.754147 4902 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 14:16:22 crc kubenswrapper[4902]: E1202 14:16:22.061056 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.169487 4902 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.169557 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.272308 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.273518 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.273585 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.273598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:22 crc kubenswrapper[4902]: I1202 14:16:22.273625 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:25 crc kubenswrapper[4902]: I1202 14:16:25.358969 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 14:16:25 crc kubenswrapper[4902]: I1202 14:16:25.368975 4902 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.759824 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.760077 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.763392 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.763468 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.763512 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 
14:16:26.769149 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:26 crc kubenswrapper[4902]: I1202 14:16:26.774898 4902 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.053109 4902 apiserver.go:52] "Watching apiserver" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.056950 4902 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.057262 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.057783 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.057781 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.057866 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.058019 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.058051 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.058110 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.058151 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.058242 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.058297 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060299 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060336 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060368 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060527 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060642 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.060755 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.062037 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.062089 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.063363 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.096694 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.111974 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.125414 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.139909 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.152704 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.154089 4902 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.165455 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.165518 4902 trace.go:236] Trace[1536697687]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 14:16:12.614) (total time: 14551ms): Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[1536697687]: ---"Objects listed" error: 14551ms (14:16:27.165) Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[1536697687]: [14.551390511s] [14.551390511s] END Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.165662 4902 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.170221 4902 trace.go:236] Trace[1259057901]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 14:16:12.767) (total time: 14402ms): Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[1259057901]: ---"Objects listed" error: 14402ms (14:16:27.170) Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[1259057901]: [14.402204213s] [14.402204213s] END Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.170260 4902 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.170431 4902 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.171490 4902 trace.go:236] Trace[680864945]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 14:16:12.258) (total time: 14913ms): Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[680864945]: ---"Objects listed" error: 14912ms (14:16:27.171) Dec 02 14:16:27 crc kubenswrapper[4902]: Trace[680864945]: [14.913109225s] [14.913109225s] END Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.171557 4902 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.175528 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.187881 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.201089 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.216111 4902 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body= Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.216358 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.216358 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.216851 4902 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.216917 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271013 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271071 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271100 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271138 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271164 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271188 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271207 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271225 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271242 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271260 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271280 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271300 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271321 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271344 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271367 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271357 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271390 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271414 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271437 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271462 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271481 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271502 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271527 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271550 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271584 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271612 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271641 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271665 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271687 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271710 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271733 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271756 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271777 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271798 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271818 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271840 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271861 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271882 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271907 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271932 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271953 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271974 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271983 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.271995 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272019 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272042 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272066 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272089 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272112 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272133 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272155 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272163 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272178 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272203 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272228 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272250 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272273 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272327 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272352 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272373 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272395 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272416 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272423 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272437 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272460 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272485 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272508 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272531 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272554 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272599 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272624 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 
14:16:27.272635 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272647 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272651 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272673 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272697 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272721 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272748 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272773 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272796 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272819 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272833 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272844 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272871 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272895 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272922 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272949 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.272973 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273000 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273011 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" 
(UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273022 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273052 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273066 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273076 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273147 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273180 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273208 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273238 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273265 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273291 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273318 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273343 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273366 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273395 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273418 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273422 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273472 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273491 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273500 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273544 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273602 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273629 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273653 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273659 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273696 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273724 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273750 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273780 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273808 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273833 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273856 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273881 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273906 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: 
\"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273932 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273956 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273981 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274003 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274024 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274048 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274070 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274094 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274117 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274142 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274168 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274189 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274210 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274233 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274254 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274277 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274299 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274322 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274346 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274371 4902 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274393 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274418 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274439 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274464 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274487 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274509 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274539 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274755 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274785 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275529 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275574 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275596 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275617 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275640 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275662 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275693 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275716 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275739 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275764 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275786 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275906 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275941 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275966 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275989 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276011 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276032 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276054 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276076 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276099 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276123 4902 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276146 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276229 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276256 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276280 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276320 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276343 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276366 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276390 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276413 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: 
\"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276435 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276459 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276482 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276508 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276533 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276558 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276600 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276626 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276651 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276674 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276698 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276721 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276746 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276769 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276789 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276807 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276823 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276843 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277862 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277883 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" 
(UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277901 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277919 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277938 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277954 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277973 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277993 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278018 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279757 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279807 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279848 4902 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279868 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279889 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279923 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279953 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279982 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280010 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280034 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280055 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280078 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280097 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280119 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280172 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280183 4902 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280194 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280204 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280214 4902 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280225 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280235 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280246 4902 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280256 4902 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280266 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280276 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280285 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280295 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.280357 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.281416 4902 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.284392 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.284742 4902 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285239 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285765 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273764 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273821 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.273857 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274019 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274076 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274307 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274529 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274642 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.274760 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275082 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.275175 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276345 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.276474 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277072 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277190 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277385 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277403 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277459 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277514 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277645 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.277853 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278047 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278426 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278453 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.290696 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278652 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278609 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.278959 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279114 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279200 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279266 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279319 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279435 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279448 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279674 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.279787 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.281172 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.281601 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.281629 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.281688 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.282054 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.282079 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.282748 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.283240 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.283244 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.283551 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.283609 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.283757 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.284481 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.284750 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.284761 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285456 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285479 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285665 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.285882 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286077 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286092 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286077 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286237 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286465 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286498 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286813 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286835 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). 
InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286195 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.286941 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287146 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287350 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287469 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287602 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287667 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287821 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.287879 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288003 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288127 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288129 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288360 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288620 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288705 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.288803 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.289966 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.290330 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.290654 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.290791 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.290942 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291250 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291438 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291489 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291720 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291744 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.291773 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.292013 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.292022 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.292130 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.292191 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.293318 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.293745 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.293935 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.294824 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.295440 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.295701 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.295818 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.295987 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.296138 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.296492 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.296856 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.296927 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.296969 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297143 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.297025 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:27.797003956 +0000 UTC m=+18.988312665 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297321 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297581 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297621 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297660 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.297869 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.298111 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:27.798085767 +0000 UTC m=+18.989394476 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.298205 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:27.79819548 +0000 UTC m=+18.989504189 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298211 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298430 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298476 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298682 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298848 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298923 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.298962 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.299044 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.299440 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.299662 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.299699 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.299780 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.300092 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.300783 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.301207 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.301511 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.301708 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.301820 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.302231 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.303353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.303357 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.303367 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305331 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305346 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305439 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305735 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305764 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.305929 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.306921 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.307123 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.307182 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.307427 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.307603 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.307625 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.307640 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.307714 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-02 14:16:27.807691754 +0000 UTC m=+18.999000663 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.308053 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.308548 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.308649 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.308663 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.308694 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:27.808685033 +0000 UTC m=+18.999993742 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.318200 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.318205 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.318413 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.318858 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319106 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319252 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319264 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319538 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319543 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.319814 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.320736 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.321457 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.321590 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.321616 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.321484 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.322510 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.322711 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.322938 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.322996 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.323602 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.323671 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.323770 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324117 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324228 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324353 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324448 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324598 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324660 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324898 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.324781 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.325280 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.325617 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.325633 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.326175 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.326461 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.326600 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.326662 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.339949 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.344131 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.347183 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.360492 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.381209 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.381790 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382034 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.381919 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382443 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382604 4902 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382697 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382717 4902 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382730 4902 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382743 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382756 4902 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382770 4902 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382782 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382795 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382810 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382823 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382836 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382848 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382862 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382874 4902 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382888 4902 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382900 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382912 4902 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382925 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382937 4902 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382950 4902 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382963 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382974 4902 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.382987 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383005 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383022 4902 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383038 4902 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383055 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383071 4902 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383087 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383105 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383124 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383141 4902 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383157 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383174 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383191 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383207 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383225 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383242 4902 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383258 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383275 4902 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383292 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383307 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383325 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383341 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383362 4902 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383381 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383399 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383416 4902 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383434 4902 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383451 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383468 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383488 4902 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383504 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383520 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383535 4902 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383552 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383592 4902 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383610 4902 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383627 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383643 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383659 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383676 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383693 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383709 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383730 4902 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383747 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383763 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383786 4902 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383803 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383818 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383834 4902 reconciler_common.go:293] "Volume 
detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383849 4902 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383865 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383884 4902 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383899 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383914 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383935 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383951 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383967 4902 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.383982 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384000 4902 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384015 4902 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384033 4902 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384049 4902 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384065 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384080 4902 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384096 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384121 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384139 4902 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384155 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384171 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384187 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384203 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384222 4902 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384238 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384297 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384317 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384334 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384349 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384365 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384381 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384399 4902 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384415 4902 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384432 4902 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384447 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384462 4902 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384477 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384495 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384515 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384532 4902 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384549 4902 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384590 4902 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384607 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384623 4902 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384639 4902 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384656 4902 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384673 4902 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384691 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384706 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384723 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384739 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384755 4902 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384771 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 
14:16:27.384830 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384850 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384867 4902 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384883 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384899 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384915 4902 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384932 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384947 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384963 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384979 4902 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.384994 4902 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385023 4902 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385040 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc 
kubenswrapper[4902]: I1202 14:16:27.385057 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385074 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385090 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385106 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385121 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385138 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385152 4902 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385166 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385183 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385201 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385214 4902 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385225 4902 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385237 4902 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on 
node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385247 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385258 4902 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385271 4902 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385284 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385297 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385308 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385319 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385330 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385341 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385352 4902 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385363 4902 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385374 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385384 4902 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on 
node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385397 4902 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385408 4902 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385419 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385430 4902 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385441 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385452 4902 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385463 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385474 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385484 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385495 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385507 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385520 4902 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385541 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 
crc kubenswrapper[4902]: I1202 14:16:27.385557 4902 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385596 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385611 4902 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.385625 4902 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.388084 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 14:16:27 crc kubenswrapper[4902]: W1202 14:16:27.396116 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-a6d827022790b478d0d109a0b7b97a6f2e56f1d180447adfce11a1b874538886 WatchSource:0}: Error finding container a6d827022790b478d0d109a0b7b97a6f2e56f1d180447adfce11a1b874538886: Status 404 returned error can't find the container with id a6d827022790b478d0d109a0b7b97a6f2e56f1d180447adfce11a1b874538886 Dec 02 14:16:27 crc kubenswrapper[4902]: W1202 14:16:27.401430 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-9a0e5703bd93c4c177c3d5b2c2d588410afdc42ab8347494a5cfa318fb3d6ddf WatchSource:0}: Error finding container 9a0e5703bd93c4c177c3d5b2c2d588410afdc42ab8347494a5cfa318fb3d6ddf: Status 404 returned error can't find the container with id 9a0e5703bd93c4c177c3d5b2c2d588410afdc42ab8347494a5cfa318fb3d6ddf Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.672429 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.774509 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.779042 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.786902 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.795021 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.807922 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.824828 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.837005 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.853643 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.866207 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.876180 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.888662 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.888720 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.888743 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.888762 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.888800 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888821 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:28.888801796 +0000 UTC m=+20.080110505 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888888 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888919 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888940 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888951 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888899 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888986 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888989 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:28.888968951 +0000 UTC m=+20.080277710 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888993 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.888911 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.889008 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:28.889000072 +0000 UTC m=+20.080308881 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.889100 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:28.889092045 +0000 UTC m=+20.080400754 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:27 crc kubenswrapper[4902]: E1202 14:16:27.889116 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:28.889111515 +0000 UTC m=+20.080420224 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.891241 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.905452 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.916953 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.927849 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.943839 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.955509 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.966085 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:27 crc kubenswrapper[4902]: I1202 14:16:27.978642 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:27Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.205854 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.205910 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4d03cdab8dfd88027c6efb5e1b22eb758ab794f23acc45b4437dd37774a6d446"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.206868 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9a0e5703bd93c4c177c3d5b2c2d588410afdc42ab8347494a5cfa318fb3d6ddf"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.208531 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.208607 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.208625 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a6d827022790b478d0d109a0b7b97a6f2e56f1d180447adfce11a1b874538886"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.222891 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.224437 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.227611 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45" exitCode=255 Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.227670 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45"} Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.228542 4902 scope.go:117] "RemoveContainer" containerID="2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.241901 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.257927 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.275709 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.291116 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.314107 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.328774 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.349366 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.367864 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.384275 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.399019 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.418072 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.431767 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.445156 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.457953 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.471992 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.899279 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.899364 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.899396 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:28 crc kubenswrapper[4902]: 
I1202 14:16:28.899422 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:28 crc kubenswrapper[4902]: I1202 14:16:28.899446 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899527 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:30.899490934 +0000 UTC m=+22.090799673 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899552 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899594 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899607 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899640 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899658 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899663 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899678 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-02 14:16:30.899660728 +0000 UTC m=+22.090969447 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899676 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899850 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:30.899832033 +0000 UTC m=+22.091140782 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899876 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899894 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:30.899882985 +0000 UTC m=+22.091191734 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:28 crc kubenswrapper[4902]: E1202 14:16:28.899934 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:30.899911556 +0000 UTC m=+22.091220335 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.106491 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.106615 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:29 crc kubenswrapper[4902]: E1202 14:16:29.106662 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:29 crc kubenswrapper[4902]: E1202 14:16:29.106761 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.106499 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:29 crc kubenswrapper[4902]: E1202 14:16:29.106892 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.110390 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.111446 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.113275 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.114317 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.115912 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.116506 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.117178 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.117716 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.118346 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.118841 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.119332 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.119974 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.120477 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.120991 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.121472 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.121967 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.122515 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.123121 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.123666 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.124218 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.124716 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.125225 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.126530 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.128713 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.129275 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.131420 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.133192 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.136296 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.137391 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.138743 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.140852 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.142023 4902 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.142251 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.144660 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.146781 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.147889 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.148664 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.150302 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.153012 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.153964 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.156089 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.157841 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.159330 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.162351 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.162417 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.163464 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.164919 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.167433 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 
14:16:29.168294 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.169947 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.171219 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.172734 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.173601 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.174426 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.176438 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.177777 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.177898 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.179665 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.191862 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.208758 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.222994 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.232717 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.234422 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b"} Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.234978 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.241542 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.256364 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.271987 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.288386 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.303045 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 
+0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.321141 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.337237 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.350549 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:29 crc kubenswrapper[4902]: I1202 14:16:29.360799 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.485423 4902 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.487543 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.487654 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.487678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.487765 4902 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.498112 4902 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.498431 4902 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.500045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.500089 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.500101 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.500116 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.500127 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.529484 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:30Z is after 
2025-08-24T17:21:41Z"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.534722    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.534798    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.534821    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.534852    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.534876    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.551548    4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...duplicate status payload omitted; identical to the previous attempt...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:30Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.556998    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.557063    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.557087    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.557138    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.557164    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.578270    4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...duplicate status payload omitted; identical to the previous attempt...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:30Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.583769    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.583836    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.583860    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.583889    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.583911    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.603616    4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...duplicate status payload omitted; identical to the previous attempt...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:30Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.608631    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.608682    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.608699    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.608721    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.608736    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.625942    4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...duplicate status payload omitted; identical to the previous attempt...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:30Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.626085    4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.627948    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.627994    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.628012    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.628036    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.628054    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.731127    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.731205    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.731227    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.731256    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.731307    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.834814    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.834873    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.834893    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.834948    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.834968    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.916179    4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.916358    4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916414    4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.91637789 +0000 UTC m=+26.107686639 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.916473 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916499 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.916542 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.916615 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916645 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.916612337 +0000 UTC m=+26.107921106 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916771 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916783 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916825 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916834 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916970 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.916934836 +0000 UTC m=+26.108243585 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916851 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.917083 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.91706207 +0000 UTC m=+26.108370819 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.916794 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.917119 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:30 crc kubenswrapper[4902]: E1202 14:16:30.917172 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.917157523 +0000 UTC m=+26.108466262 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.938075 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.938118 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.938129 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.938148 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:30 crc kubenswrapper[4902]: I1202 14:16:30.938160 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:30Z","lastTransitionTime":"2025-12-02T14:16:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.041882 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.041961 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.041984 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.042015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.042034 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.106670 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.106729 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.106703 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:31 crc kubenswrapper[4902]: E1202 14:16:31.107079 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:31 crc kubenswrapper[4902]: E1202 14:16:31.107275 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:31 crc kubenswrapper[4902]: E1202 14:16:31.107499 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.145236 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.145292 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.145312 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.145336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.145354 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.243151 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.248040 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.248106 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.248127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.248153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.248184 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.271171 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.294180 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.319460 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.343647 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting 
controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.350445 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.350511 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.350530 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.350556 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.350602 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.362821 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.377878 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.389788 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.390992 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.410064 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.411409 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.416928 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.429724 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [etcd]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[etcd]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"d
ata-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2
025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.446262 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 
+0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.453820 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.453866 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.453878 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.453905 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.453916 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.459619 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.473610 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.492285 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.507708 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.525200 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.549136 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.556933 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.556971 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.556984 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.557004 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.557016 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.564055 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.574716 4902 csr.go:261] certificate signing request csr-6b8cp is approved, waiting to be issued Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.581853 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.595358 4902 csr.go:257] certificate signing request csr-6b8cp is issued Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.625645 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a564
6fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.660152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.660247 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.660270 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.660301 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.660325 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.662883 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 
1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.693232 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.743908 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.763425 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.763479 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.763491 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.763510 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.763521 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
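Every "Failed to update status for pod" record above fails for the same reason: the serving certificate behind the pod.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, and the node clock now reads 2025-12-02, so every TLS handshake to https://127.0.0.1:9743 is rejected before the patch is even evaluated. Below is a minimal Go sketch of the validity check the handshake performs; the PEM path is an assumption based on the /etc/webhook-cert/ mount that appears later in this log.

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        // Assumed path: the webhook container mounts its serving cert at /etc/webhook-cert/.
        data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block in file")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        // Same comparison the TLS verifier makes: NotBefore <= now <= NotAfter.
        now := time.Now().UTC()
        switch {
        case now.After(cert.NotAfter):
            fmt.Printf("expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        case now.Before(cert.NotBefore):
            fmt.Printf("not yet valid: current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        default:
            fmt.Printf("valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
        }
    }

The kubelet keeps retrying the status patch, which is why the same x509 failure repeats below with only the pod name and timestamp changing.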
Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.764436 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.780424 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.797948 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.810434 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:31Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.865218 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.865262 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.865273 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.865293 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.865304 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.967126 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.967163 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.967174 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.967193 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:31 crc kubenswrapper[4902]: I1202 14:16:31.967206 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:31Z","lastTransitionTime":"2025-12-02T14:16:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.069748 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.069795 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.069808 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.069827 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.069841 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.172587 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.172623 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.172633 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.172650 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.172662 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.274944 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.274979 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.274988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.275002 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.275011 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.377074 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.377106 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.377115 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.377132 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.377143 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.478840 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.478900 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.478912 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.478929 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.478940 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.581406 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.581449 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.581460 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.581477 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.581489 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
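The NodeNotReady records repeating above all carry the same runtime message: NetworkReady stays false until a CNI network configuration shows up in /etc/kubernetes/cni/net.d/, a file typically written by multus once its pods start on this cluster. Below is a rough Go approximation of that readiness check; the accepted extensions follow libcni's loader conventions, while the authoritative rule lives in the container runtime.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // hasCNIConfig reports whether dir contains at least one CNI network
    // config file, approximating the runtime's NetworkReady condition.
    func hasCNIConfig(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            // Extensions libcni scans for when loading network configs.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
        if err != nil {
            fmt.Println("cannot read CNI config dir:", err)
            return
        }
        fmt.Println("NetworkReady (approximation):", ok)
    }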
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.596820 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-02 14:11:31 +0000 UTC, rotation deadline is 2026-09-08 19:08:27.939084991 +0000 UTC Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.596851 4902 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6724h51m55.342236531s for next certificate rotation Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.683713 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.683756 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.683766 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.683783 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.683794 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.701281 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-v8znh"] Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.701905 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-qhrkh"] Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.702409 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.702883 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-jm696"] Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.704000 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.705526 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.716899 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vm9q6"] Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.717321 4902 util.go:30] "No sandbox for pod can be found. 
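The two certificate_manager lines in the block above are client-go scheduling the next kubelet client-certificate rotation: the deadline is drawn uniformly from 70% to 90% of the certificate's lifetime, and the logged deadline sits at about 76.8% of the one-year validity, consistent with that range. A simplified re-derivation under those assumptions, using the timestamps from the log:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func main() {
        // Issue and expiry implied by "Certificate expiration is 2026-12-02 14:11:31".
        notBefore := time.Date(2025, 12, 2, 14, 11, 31, 0, time.UTC)
        notAfter := time.Date(2026, 12, 2, 14, 11, 31, 0, time.UTC)
        total := notAfter.Sub(notBefore)

        // client-go jitters the rotation point uniformly across [70%, 90%] of the lifetime.
        jitter := 0.7 + 0.2*rand.Float64()
        deadline := notBefore.Add(time.Duration(float64(total) * jitter))

        now := time.Date(2025, 12, 2, 14, 16, 32, 0, time.UTC) // log timestamp
        fmt.Printf("rotation deadline %s, waiting %s\n",
            deadline.UTC().Format(time.RFC3339), deadline.Sub(now).Round(time.Second))

        // The deadline actually logged lands at ~0.7677 of the lifetime, inside [0.7, 0.9].
        logged := time.Date(2026, 9, 8, 19, 8, 27, 0, time.UTC)
        fmt.Printf("logged deadline fraction: %.4f\n",
            float64(logged.Sub(notBefore))/float64(total))
    }

Hence the "Waiting 6724h51m55.342236531s" line: roughly 280 days from now, comfortably before the certificate's final expiry.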
Need to start a new one" pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718100 4902 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718144 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718256 4902 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718279 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718377 4902 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718394 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718448 4902 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718462 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace 
\"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718512 4902 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718527 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718725 4902 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718749 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718829 4902 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718846 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718891 4902 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.718907 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found 
between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.718947 4902 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719023 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.719615 4902 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719641 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.719688 4902 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719703 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.719739 4902 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719754 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" 
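The burst of forbidden list/watch errors above is the node authorizer doing its job: system:node:crc may only read secrets and configmaps referenced by pods already bound to node crc, and "no relationship found between node 'crc' and this object" means the authorizer's graph has not yet linked the freshly added daemonset pods to the node. These errors clear on their own once the bindings propagate. Below is a hedged client-go sketch for probing one such grant; the kubeconfig path is an assumption (a typical node location), and the object names are taken from the messages above.

    package main

    import (
        "context"
        "fmt"
        "log"

        authv1 "k8s.io/api/authorization/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumed path: run with the kubelet's credentials so the answer is for system:node:crc.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
        if err != nil {
            log.Fatal(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            log.Fatal(err)
        }
        sar := &authv1.SelfSubjectAccessReview{
            Spec: authv1.SelfSubjectAccessReviewSpec{
                ResourceAttributes: &authv1.ResourceAttributes{
                    Namespace: "openshift-multus",
                    Verb:      "list",
                    Resource:  "configmaps",
                    Name:      "cni-copy-resources",
                },
            },
        }
        resp, err := cs.AuthorizationV1().SelfSubjectAccessReviews().Create(
            context.TODO(), sar, metav1.CreateOptions{})
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("allowed=%v reason=%q\n", resp.Status.Allowed, resp.Status.Reason)
    }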
logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.719823 4902 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719840 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.719918 4902 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.719934 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: W1202 14:16:32.722991 4902 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 02 14:16:32 crc kubenswrapper[4902]: E1202 14:16:32.723038 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.739453 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.754072 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.768662 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.786533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.786588 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.786598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.786615 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.786625 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.788721 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.812842 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.831946 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 
+0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834165 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-os-release\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834215 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " 
pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834246 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wxbj\" (UniqueName: \"kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834289 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cni-binary-copy\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834317 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834340 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834375 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-system-cni-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834460 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cnibin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834555 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-cnibin\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834636 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-conf-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834661 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjr87\" (UniqueName: 
\"kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834690 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834713 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-os-release\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834740 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-bin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-multus\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834785 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq8gs\" (UniqueName: \"kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834809 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-etc-kubernetes\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834849 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66d4b493-68d5-4caa-b053-eb1858dd41d1-hosts-file\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834878 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-binary-copy\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834912 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-netns\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834944 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-system-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.834978 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-socket-dir-parent\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835013 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835029 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-k8s-cni-cncf-io\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835044 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-hostroot\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835065 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-multus-certs\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835091 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c78ba9cf-533f-4683-8531-045256a5d819-rootfs\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835118 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835212 4902 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-kubelet\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.835293 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph468\" (UniqueName: \"kubernetes.io/projected/66d4b493-68d5-4caa-b053-eb1858dd41d1-kube-api-access-ph468\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.852442 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.864046 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.874674 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.885434 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.888153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.888189 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.888198 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.888215 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.888224 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.923271 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936555 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936628 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjr87\" (UniqueName: \"kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936648 
4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-os-release\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936675 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-bin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936691 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-multus\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936710 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-etc-kubernetes\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936731 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66d4b493-68d5-4caa-b053-eb1858dd41d1-hosts-file\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936749 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-binary-copy\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936774 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq8gs\" (UniqueName: \"kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936801 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-netns\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936848 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-os-release\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936895 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-etc-kubernetes\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936891 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66d4b493-68d5-4caa-b053-eb1858dd41d1-hosts-file\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936872 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-multus\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.936990 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-socket-dir-parent\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937052 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-netns\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937097 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937092 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-cni-bin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937105 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-socket-dir-parent\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937160 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-system-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937183 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " 
pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937239 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-system-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937243 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-hostroot\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937278 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-hostroot\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937333 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-multus-certs\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937364 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c78ba9cf-533f-4683-8531-045256a5d819-rootfs\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937407 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-multus-certs\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937427 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c78ba9cf-533f-4683-8531-045256a5d819-rootfs\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937433 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937467 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-k8s-cni-cncf-io\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937506 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-ph468\" (UniqueName: \"kubernetes.io/projected/66d4b493-68d5-4caa-b053-eb1858dd41d1-kube-api-access-ph468\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937522 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-kubelet\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937542 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-os-release\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937584 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wxbj\" (UniqueName: \"kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937601 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cni-binary-copy\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937619 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-cni-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937619 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937664 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937707 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-system-cni-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937727 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937735 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-run-k8s-cni-cncf-io\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937751 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-cnibin\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937774 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cnibin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937795 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-conf-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937862 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-conf-dir\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.937939 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-system-cni-dir\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.938028 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-cnibin\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.938065 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-host-var-lib-kubelet\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.938071 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cnibin\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " 
pod="openshift-multus/multus-vm9q6" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.938353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e4c95c0d-fa60-4166-beff-455723877688-os-release\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.946706 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.966429 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.979203 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.990472 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.990494 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.990503 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.990517 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.990526 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:32Z","lastTransitionTime":"2025-12-02T14:16:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:32 crc kubenswrapper[4902]: I1202 14:16:32.991683 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.008664 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc1
8fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.019587 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.029328 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.045277 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.064356 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q55jp"] Dec 02 
14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.065145 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.067140 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.067532 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.067870 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.068135 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.068312 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.068815 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.070348 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.072626 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d3
19132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift
-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.089654 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.093225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.093257 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.093266 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.093281 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.093290 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.102837 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.105713 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.105742 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.105808 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.105754 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.105899 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.105980 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.116546 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.128777 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140023 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140062 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 
14:16:33.140081 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140123 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140137 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140153 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140169 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140202 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140221 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140239 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140292 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140343 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g4l5\" (UniqueName: \"kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140375 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140395 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140416 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140439 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140460 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140482 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140505 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.140627 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.142748 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.154907 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.166530 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.178873 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.192671 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.195184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.195219 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.195232 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.195248 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.195261 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.203932 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.215017 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.234637 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241359 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241405 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241429 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241474 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241493 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241515 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241549 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241577 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241610 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc 
kubenswrapper[4902]: I1202 14:16:33.241615 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241639 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241664 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241556 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241633 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241719 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241759 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g4l5\" (UniqueName: \"kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241792 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241685 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241815 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241840 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241854 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241861 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241886 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241889 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241925 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241948 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241965 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241982 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet\") pod \"ovnkube-node-q55jp\" (UID: 
\"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.241996 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242012 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242043 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242066 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242081 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242106 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242087 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242219 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242627 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config\") pod \"ovnkube-node-q55jp\" (UID: 
\"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.242675 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.245894 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.259377 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"na
me\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"st
artedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.262213 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g4l5\" (UniqueName: \"kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5\") pod \"ovnkube-node-q55jp\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.274656 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.293296 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: 
I1202 14:16:33.297122 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.297164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.297174 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.297190 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.297201 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.306248 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.316161 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:33Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:33 crc 
kubenswrapper[4902]: I1202 14:16:33.376093 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:33 crc kubenswrapper[4902]: W1202 14:16:33.390237 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9379a49e_d66c_4224_a564_f00d4cadd9ea.slice/crio-744eec9ba69a27be30b03927bbd1d12fbda0909ae1a2d6492c91bd32b0b22bf6 WatchSource:0}: Error finding container 744eec9ba69a27be30b03927bbd1d12fbda0909ae1a2d6492c91bd32b0b22bf6: Status 404 returned error can't find the container with id 744eec9ba69a27be30b03927bbd1d12fbda0909ae1a2d6492c91bd32b0b22bf6 Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.398889 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.398922 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.398933 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.398948 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.398961 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.501184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.501496 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.501513 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.501528 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.501539 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.588944 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.590341 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.598708 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-binary-copy\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.599694 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-cni-binary-copy\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.604217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.604264 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.604281 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.604304 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.604322 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.676193 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.706851 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.706907 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.706919 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.706937 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.706953 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.712597 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.724391 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e4c95c0d-fa60-4166-beff-455723877688-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.809193 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.809247 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.809264 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.809287 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.809306 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.888218 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.911882 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.911936 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.911952 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.911975 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.911993 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:33Z","lastTransitionTime":"2025-12-02T14:16:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.912551 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938078 4902 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938113 4902 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938190 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls podName:c78ba9cf-533f-4683-8531-045256a5d819 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.438164137 +0000 UTC m=+25.629472856 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls") pod "machine-config-daemon-v8znh" (UID: "c78ba9cf-533f-4683-8531-045256a5d819") : failed to sync secret cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938212 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config podName:c78ba9cf-533f-4683-8531-045256a5d819 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.438202458 +0000 UTC m=+25.629511177 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config") pod "machine-config-daemon-v8znh" (UID: "c78ba9cf-533f-4683-8531-045256a5d819") : failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938209 4902 configmap.go:193] Couldn't get configMap openshift-multus/multus-daemon-config: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.938312 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config podName:381fdb33-d71e-468a-9b1e-a2920c32f8ae nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.43829028 +0000 UTC m=+25.629598999 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "multus-daemon-config" (UniqueName: "kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config") pod "multus-vm9q6" (UID: "381fdb33-d71e-468a-9b1e-a2920c32f8ae") : failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.943400 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.949798 4902 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.949847 4902 projected.go:194] Error preparing data for projected volume kube-api-access-sq8gs for pod openshift-multus/multus-additional-cni-plugins-jm696: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.949911 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs podName:e4c95c0d-fa60-4166-beff-455723877688 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.449892525 +0000 UTC m=+25.641201244 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-sq8gs" (UniqueName: "kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs") pod "multus-additional-cni-plugins-jm696" (UID: "e4c95c0d-fa60-4166-beff-455723877688") : failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.955459 4902 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.960861 4902 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.960893 4902 projected.go:194] Error preparing data for projected volume kube-api-access-pjr87 for pod openshift-multus/multus-vm9q6: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: E1202 14:16:33.960959 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87 podName:381fdb33-d71e-468a-9b1e-a2920c32f8ae nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.460935134 +0000 UTC m=+25.652243953 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-pjr87" (UniqueName: "kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87") pod "multus-vm9q6" (UID: "381fdb33-d71e-468a-9b1e-a2920c32f8ae") : failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:33 crc kubenswrapper[4902]: I1202 14:16:33.972531 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.014120 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.014179 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.014203 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.014224 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.014239 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.061836 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.101245 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.116620 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.116668 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.116680 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.116696 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.116706 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.143638 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.145619 4902 projected.go:194] Error preparing data for projected volume kube-api-access-4wxbj for pod openshift-machine-config-operator/machine-config-daemon-v8znh: failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.145710 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj podName:c78ba9cf-533f-4683-8531-045256a5d819 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:34.64568176 +0000 UTC m=+25.836990479 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4wxbj" (UniqueName: "kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj") pod "machine-config-daemon-v8znh" (UID: "c78ba9cf-533f-4683-8531-045256a5d819") : failed to sync configmap cache: timed out waiting for the condition Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.162647 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.168968 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.217151 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.218313 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.218341 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.218350 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.218365 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.218379 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.250060 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" exitCode=0 Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.250102 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.250129 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"744eec9ba69a27be30b03927bbd1d12fbda0909ae1a2d6492c91bd32b0b22bf6"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.274051 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\
\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.282767 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.288786 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.293206 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph468\" (UniqueName: \"kubernetes.io/projected/66d4b493-68d5-4caa-b053-eb1858dd41d1-kube-api-access-ph468\") pod \"node-resolver-qhrkh\" (UID: \"66d4b493-68d5-4caa-b053-eb1858dd41d1\") " pod="openshift-dns/node-resolver-qhrkh" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.312284 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3
b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.320916 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.320948 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.320956 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.320970 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.320979 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.331001 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.341520 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.357768 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.377855 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.393187 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.407417 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422208 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422506 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422545 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422555 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422590 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.422600 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.437651 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.454997 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.455045 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.455082 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq8gs\" (UniqueName: \"kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.455118 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.455970 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78ba9cf-533f-4683-8531-045256a5d819-mcd-auth-proxy-config\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.455994 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/381fdb33-d71e-468a-9b1e-a2920c32f8ae-multus-daemon-config\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.457076 4902 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:
16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.460157 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq8gs\" (UniqueName: \"kubernetes.io/projected/e4c95c0d-fa60-4166-beff-455723877688-kube-api-access-sq8gs\") pod \"multus-additional-cni-plugins-jm696\" (UID: \"e4c95c0d-fa60-4166-beff-455723877688\") " pod="openshift-multus/multus-additional-cni-plugins-jm696" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.461006 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c78ba9cf-533f-4683-8531-045256a5d819-proxy-tls\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.469076 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.494378 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:34Z 
is after 2025-08-24T17:21:41Z"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.525159 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.525202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.525213 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.525232 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.525244 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.541901 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qhrkh"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.551742 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jm696"
Dec 02 14:16:34 crc kubenswrapper[4902]: W1202 14:16:34.551742 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66d4b493_68d5_4caa_b053_eb1858dd41d1.slice/crio-bb947117fe76c49ae0514accbbe68a46d3369c4983537464cd7c49038697bc20 WatchSource:0}: Error finding container bb947117fe76c49ae0514accbbe68a46d3369c4983537464cd7c49038697bc20: Status 404 returned error can't find the container with id bb947117fe76c49ae0514accbbe68a46d3369c4983537464cd7c49038697bc20
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.555582 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjr87\" (UniqueName: \"kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.559117 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjr87\" (UniqueName: \"kubernetes.io/projected/381fdb33-d71e-468a-9b1e-a2920c32f8ae-kube-api-access-pjr87\") pod \"multus-vm9q6\" (UID: \"381fdb33-d71e-468a-9b1e-a2920c32f8ae\") " pod="openshift-multus/multus-vm9q6"
Dec 02 14:16:34 crc kubenswrapper[4902]: W1202 14:16:34.584579 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4c95c0d_fa60_4166_beff_455723877688.slice/crio-458f5033ea89f4111474d784efc485f4cf0954c6512c077065df2ac55d7d7573 WatchSource:0}: Error finding container 458f5033ea89f4111474d784efc485f4cf0954c6512c077065df2ac55d7d7573: Status 404 returned error can't find the container with id 458f5033ea89f4111474d784efc485f4cf0954c6512c077065df2ac55d7d7573
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.627555 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.627610 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.627620 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.627636 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.627645 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.656901 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wxbj\" (UniqueName: \"kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.660403 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wxbj\" (UniqueName: \"kubernetes.io/projected/c78ba9cf-533f-4683-8531-045256a5d819-kube-api-access-4wxbj\") pod \"machine-config-daemon-v8znh\" (UID: \"c78ba9cf-533f-4683-8531-045256a5d819\") " pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.730169 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.730478 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.730487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.730501 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.730510 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.828010 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.832428 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.832453 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.832461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.832473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.832612 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.858086 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vm9q6"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.934737 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.934780 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.934791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.934811 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.934823 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:34Z","lastTransitionTime":"2025-12-02T14:16:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.959801 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.959915 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.959937 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:42.959920004 +0000 UTC m=+34.151228713 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.959972 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.960022 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:34 crc kubenswrapper[4902]: I1202 14:16:34.960063 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960098 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960112 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960122 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960157 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:42.960149581 +0000 UTC m=+34.151458290 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960179 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960233 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:42.960216083 +0000 UTC m=+34.151524832 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960176 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960259 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960318 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960343 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960284 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:42.960272544 +0000 UTC m=+34.151581283 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 14:16:34 crc kubenswrapper[4902]: E1202 14:16:34.960425 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:42.960399038 +0000 UTC m=+34.151707787 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.037949 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.038026 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.038045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.038070 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.038091 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.106655 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.106711 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.106678 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:35 crc kubenswrapper[4902]: E1202 14:16:35.106827 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:16:35 crc kubenswrapper[4902]: E1202 14:16:35.106948 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:16:35 crc kubenswrapper[4902]: E1202 14:16:35.107029 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.140422 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.140465 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.140473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.140488 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.140497 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.242745 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.242788 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.242798 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.242814 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.242824 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.243961 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-8tnr5"]
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.244303 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8tnr5"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.246110 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.247160 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.249144 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.250175 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.254808 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qhrkh" event={"ID":"66d4b493-68d5-4caa-b053-eb1858dd41d1","Type":"ContainerStarted","Data":"bb947117fe76c49ae0514accbbe68a46d3369c4983537464cd7c49038697bc20"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.255610 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerStarted","Data":"458f5033ea89f4111474d784efc485f4cf0954c6512c077065df2ac55d7d7573"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.258165 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.258196 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.258210 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.258222 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.259334 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerStarted","Data":"b9591089eb9d8f5489662524f1498469fe97229b5de6ee619b5baafdc49d7f9f"}
Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.260280 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"3e700533b5c79bf806fa052f219112403cde540c97d48e8a3fe2ab432048283a"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.276338 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.288965 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.306632 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z 
is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.319211 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.329004 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.337841 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.344976 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.345004 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.345015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.345031 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.345043 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.354686 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.363011 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrh2h\" (UniqueName: \"kubernetes.io/projected/a4e841e0-3d80-41c7-b03b-672d129caddd-kube-api-access-xrh2h\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.363054 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e841e0-3d80-41c7-b03b-672d129caddd-host\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.363080 4902 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e841e0-3d80-41c7-b03b-672d129caddd-serviceca\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.367763 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.378018 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.392730 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.410645 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.425795 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.437654 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.447122 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.447146 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.447154 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.447166 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.447175 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.450534 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: 
I1202 14:16:35.464120 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e841e0-3d80-41c7-b03b-672d129caddd-host\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.464217 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e841e0-3d80-41c7-b03b-672d129caddd-serviceca\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.464279 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrh2h\" (UniqueName: \"kubernetes.io/projected/a4e841e0-3d80-41c7-b03b-672d129caddd-kube-api-access-xrh2h\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.464311 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e841e0-3d80-41c7-b03b-672d129caddd-host\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.465423 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e841e0-3d80-41c7-b03b-672d129caddd-serviceca\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.471491 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:35Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.491615 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrh2h\" (UniqueName: \"kubernetes.io/projected/a4e841e0-3d80-41c7-b03b-672d129caddd-kube-api-access-xrh2h\") pod \"node-ca-8tnr5\" (UID: \"a4e841e0-3d80-41c7-b03b-672d129caddd\") " pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.550512 4902 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.550595 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.550607 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.550626 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.550641 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.561221 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8tnr5" Dec 02 14:16:35 crc kubenswrapper[4902]: W1202 14:16:35.576972 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4e841e0_3d80_41c7_b03b_672d129caddd.slice/crio-0c7e5bb84105bbe0b0605076f5946942eca90b0d8bed1a1c2aebb69abf2cc67f WatchSource:0}: Error finding container 0c7e5bb84105bbe0b0605076f5946942eca90b0d8bed1a1c2aebb69abf2cc67f: Status 404 returned error can't find the container with id 0c7e5bb84105bbe0b0605076f5946942eca90b0d8bed1a1c2aebb69abf2cc67f Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.653139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.653176 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.653184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.653198 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.653206 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.755578 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.755607 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.755615 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.755632 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.755642 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.866362 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.866412 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.866424 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.866442 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.866454 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.968734 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.969011 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.969031 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.969058 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:35 crc kubenswrapper[4902]: I1202 14:16:35.969080 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:35Z","lastTransitionTime":"2025-12-02T14:16:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.072922 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.072961 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.072972 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.072988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.073000 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.175104 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.175150 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.175161 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.175181 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.175192 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.265993 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8tnr5" event={"ID":"a4e841e0-3d80-41c7-b03b-672d129caddd","Type":"ContainerStarted","Data":"e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.266053 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8tnr5" event={"ID":"a4e841e0-3d80-41c7-b03b-672d129caddd","Type":"ContainerStarted","Data":"0c7e5bb84105bbe0b0605076f5946942eca90b0d8bed1a1c2aebb69abf2cc67f"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.270916 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.270946 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.272354 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerStarted","Data":"644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.274525 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.274680 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.279834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.279867 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.279879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.279892 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.279902 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.280959 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.281369 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.280804 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a" exitCode=0 Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.284922 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qhrkh" event={"ID":"66d4b493-68d5-4caa-b053-eb1858dd41d1","Type":"ContainerStarted","Data":"d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.293125 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.305652 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.323726 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.345559 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772f
c6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee
22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.359832 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.382765 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z 
is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.393066 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.393116 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.393133 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.393157 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.393175 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.398804 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.408681 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.419231 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.434943 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.450205 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.466121 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.481491 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.495315 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.497391 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.497455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.497466 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.497485 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.497828 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.512988 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.528465 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.543220 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.563395 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc 
kubenswrapper[4902]: I1202 14:16:36.584127 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.596447 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.599702 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.599737 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.599746 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.599758 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.599768 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.619197 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16
a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.631979 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.645500 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.655710 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.667404 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.682716 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.696775 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.702914 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.703179 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.703191 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.703208 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.703219 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.712176 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.729277 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:36Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.805998    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.806074    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.806096    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.806125    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.806148    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.908790    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.908837    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.908850    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.908867    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:36 crc kubenswrapper[4902]: I1202 14:16:36.908878    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:36Z","lastTransitionTime":"2025-12-02T14:16:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.012632    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.012696    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.012722    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.012757    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.012781    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.106635    4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.106687    4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.106717    4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:37 crc kubenswrapper[4902]: E1202 14:16:37.106838    4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:16:37 crc kubenswrapper[4902]: E1202 14:16:37.106925    4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:16:37 crc kubenswrapper[4902]: E1202 14:16:37.107032    4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.115330    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.115388    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.115435    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.115461    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.115479    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.218387    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.218465    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.218488    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.218517    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.218540    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.292221    4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448" exitCode=0
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.292480    4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.317501    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.322157    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.322240    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.322277    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.322308    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.322326    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.337045    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.358614    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.377383    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892       1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908       1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926       1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981       1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072       1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926       1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.397057    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.411724    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.425546    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.425603    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.425615    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.425632    4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.425644    4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.433183    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.449014    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.467102    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.482292    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.505841    4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z
is after 2025-08-24T17:21:41Z" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.528360 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.528404 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.528416 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.528432 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.528444 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.537895 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.552825 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.564553 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.579158 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:37Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.631050 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.631100 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.631116 4902 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.631137 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.631151 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.733318 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.733369 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.733385 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.733409 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.733428 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.836114 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.836173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.836191 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.836228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.836246 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.939073 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.939116 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.939127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.939145 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:37 crc kubenswrapper[4902]: I1202 14:16:37.939158 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:37Z","lastTransitionTime":"2025-12-02T14:16:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.043162 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.043228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.043249 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.043280 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.043304 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.146060 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.146148 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.146175 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.146206 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.146230 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.248712 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.248791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.248817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.248880 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.248906 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.300702 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9" exitCode=0 Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.300769 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.325662 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.339189 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351516 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351554 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351590 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351607 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351584 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.351629 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.367755 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.386447 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.397345 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.412127 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.442989 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.467528 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.469042 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.469074 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.469085 4902 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.469102 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.469114 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.485049 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.495719 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.508479 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.528200 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.538505 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.555142 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:38Z 
is after 2025-08-24T17:21:41Z" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.571423 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.571495 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.571510 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.571530 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.571541 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.673611 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.673866 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.673952 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.674052 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.674131 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.776820 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.776891 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.776900 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.776915 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.776924 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.880135 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.880193 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.880216 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.880246 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.880268 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:38Z","lastTransitionTime":"2025-12-02T14:16:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:38 crc kubenswrapper[4902]: I1202 14:16:38.972795 4902 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.005630 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.005791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.005835 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.005902 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.005924 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.106279 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.106489 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.106370 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:39 crc kubenswrapper[4902]: E1202 14:16:39.106705 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:39 crc kubenswrapper[4902]: E1202 14:16:39.106818 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:39 crc kubenswrapper[4902]: E1202 14:16:39.107335 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.110211 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.110272 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.110297 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.110324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.110345 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.128483 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.153038 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.190788 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.201552 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.213597 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.230952 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247407 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247440 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247511 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247529 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247589 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.247608 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.263331 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.284884 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.308325 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.309717 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.330422 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.342517 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.350212 4902 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.350305 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.350335 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.350371 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.350398 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.359232 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.372462 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.383171 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:39Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.453545 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.453622 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.453636 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.453658 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.453673 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.556394 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.556432 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.556443 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.556456 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.556465 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.659011 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.659047 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.659055 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.659068 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.659079 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.761682 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.761722 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.761737 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.761757 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.761771 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.864659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.865241 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.865270 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.865303 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.865329 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.970217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.970285 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.970309 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.970338 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:39 crc kubenswrapper[4902]: I1202 14:16:39.970359 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:39Z","lastTransitionTime":"2025-12-02T14:16:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.073637 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.073705 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.073728 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.073756 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.073777 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.176786 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.176812 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.176822 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.176837 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.176848 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.278791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.278844 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.278858 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.278879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.278891 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.315741 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61" exitCode=0 Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.315835 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.324492 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.325132 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.339186 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.356986 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.372993 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.389785 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.409977 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.410015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.410023 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.410038 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc 
kubenswrapper[4902]: I1202 14:16:40.410049 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.413087 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.418163 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3
a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.435860 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.458271 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z 
is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.480821 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.496739 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.511874 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.512270 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.512291 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.512299 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.512313 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.512324 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.525189 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.538669 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.554317 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.567165 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.582326 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 
+0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.597451 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.610040 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.614327 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.614357 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.614365 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.614377 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.614385 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.627110 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.642386 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.655363 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.667858 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.677249 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.687340 4902 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.687367 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.687375 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.687387 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.687395 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.691957 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.696923 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.701127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.701153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.701164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.701177 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.701187 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.703833 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.712649 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"a
cc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.716062 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.716094 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.716103 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.716117 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.716128 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.721473 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.727672 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.730774 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.730803 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.730851 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.730868 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.730879 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.733317 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.742885 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.745550 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.745612 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.745624 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.745643 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.745655 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.749502 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.758135 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:40 crc kubenswrapper[4902]: E1202 14:16:40.758241 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.759648 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.759705 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.759718 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.759734 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.759745 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.761464 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2
025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.771675 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 
crc kubenswrapper[4902]: I1202 14:16:40.781642 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:40Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.873498 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.873595 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.873621 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.873655 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.873680 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.978317 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.978392 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.978414 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.978444 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:40 crc kubenswrapper[4902]: I1202 14:16:40.978465 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:40Z","lastTransitionTime":"2025-12-02T14:16:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.081063 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.081123 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.081140 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.081162 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.081177 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.106347 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:41 crc kubenswrapper[4902]: E1202 14:16:41.106536 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.106560 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:41 crc kubenswrapper[4902]: E1202 14:16:41.106757 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.106362 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:41 crc kubenswrapper[4902]: E1202 14:16:41.106890 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.184340 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.184402 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.184425 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.184454 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.184477 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.287283 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.287481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.287502 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.287527 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.287546 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.331550 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.332414 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.388390 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.390331 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.390388 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.390405 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.390430 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.390447 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.409312 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.423269 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.434296 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.456295 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.470697 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.484390 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.494472 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.494518 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.494533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.494555 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.494600 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.501406 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.517630 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63
a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default 
state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.534363 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.549956 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.571479 4902 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b40
0f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.586668 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.596246 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.596281 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.596289 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.596305 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.596316 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.606252 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.618966 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.642295 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea553522
70a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:41Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.699423 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.699459 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.699469 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.699484 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.699494 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.801786 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.801817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.801825 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.801840 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.801848 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.904852 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.904918 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.904935 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.904960 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:41 crc kubenswrapper[4902]: I1202 14:16:41.904977 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:41Z","lastTransitionTime":"2025-12-02T14:16:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.008487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.008546 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.008582 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.008606 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.008624 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.111127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.111164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.111173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.111188 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.111197 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.214752 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.214830 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.214855 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.214890 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.214912 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.318036 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.318368 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.318618 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.318772 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.318922 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.340648 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerStarted","Data":"e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.340694 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.364888 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.388230 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.406255 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.422127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.422185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.422202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.422225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.422246 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.426946 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.461175 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.487781 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.502819 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.517168 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.524681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.524709 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.524717 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.524730 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.524740 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.532138 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.545984 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.563891 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.580986 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.599882 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.620073 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.627355 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.627414 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.627432 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.627459 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.627477 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.638766 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:42Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.730269 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.730595 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.730695 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.730789 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.730867 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.834211 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.834662 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.834878 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.835069 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.835247 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.937666 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.937704 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.937716 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.937732 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:42 crc kubenswrapper[4902]: I1202 14:16:42.937743 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:42Z","lastTransitionTime":"2025-12-02T14:16:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.040185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.040235 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.040250 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.040269 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.040287 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.060235 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.060398 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.060468 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060531 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:16:59.060497701 +0000 UTC m=+50.251806410 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060586 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.060615 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060640 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:59.060628105 +0000 UTC m=+50.251936934 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060646 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.060700 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060724 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:59.060700587 +0000 UTC m=+50.252009336 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060826 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060842 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060850 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060863 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060873 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060877 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060910 4902 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:59.060902373 +0000 UTC m=+50.252211082 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.060933 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:16:59.060917403 +0000 UTC m=+50.252226242 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.106098 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.106217 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.106144 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.106349 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.106454 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:43 crc kubenswrapper[4902]: E1202 14:16:43.106556 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.143084 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.143138 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.143149 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.143164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.143175 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.245383 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.245439 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.245457 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.245483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.245501 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.347208 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.347605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.347816 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.348099 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.348299 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.348416 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3" exitCode=0 Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.348444 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.349019 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.367443 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 
+0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.387868 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.402589 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.418319 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.440073 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.451068 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.451136 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.451150 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.451166 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.451177 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.459026 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.478770 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.510104 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.534581 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.553353 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.553397 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.553409 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.553426 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.553438 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.573522 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.584594 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.602932 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea553522
70a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.614831 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.624661 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.640075 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.656118 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.656167 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.656177 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.656194 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.656206 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.758273 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.758313 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.758324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.758339 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.758349 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861107 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861282 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861296 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861314 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861326 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.861736 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.877909 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.897877 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.915115 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.933379 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.949013 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.963957 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.963999 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.964019 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.964043 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.964060 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:43Z","lastTransitionTime":"2025-12-02T14:16:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:43 crc kubenswrapper[4902]: I1202 14:16:43.981466 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:43Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.022981 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.037639 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.069778 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.069830 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.069848 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.069873 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.069891 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.070955 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.085457 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.100147 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.112104 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.129877 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.149594 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa
8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.163736 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.171684 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.171720 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.171733 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.171785 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.171799 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no 
CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.274346 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.274408 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.274428 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.274454 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.274471 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.357722 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/0.log" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.361438 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260" exitCode=1 Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.361503 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.362233 4902 scope.go:117] "RemoveContainer" containerID="fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.369586 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerStarted","Data":"1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.382833 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.382861 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.382872 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.382886 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.382897 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.388760 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.410036 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.428317 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.453512 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.474385 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.485782 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.485833 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.485850 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.485875 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.485893 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.493816 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.509646 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.529691 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.549184 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.569598 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.585923 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.588481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.588517 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.588530 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.588549 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.588579 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.604523 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8
gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.632988 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.646074 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.674198 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea553522
70a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 
2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:44Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.691400 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.691463 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.691481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.691505 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.691523 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.838442 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.838857 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.838873 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.838895 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.838910 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.942093 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.942165 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.942185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.942212 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:44 crc kubenswrapper[4902]: I1202 14:16:44.942232 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:44Z","lastTransitionTime":"2025-12-02T14:16:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.045381 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.045426 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.045449 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.045472 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.045487 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.106236 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:45 crc kubenswrapper[4902]: E1202 14:16:45.106418 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.106253 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.106494 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:45 crc kubenswrapper[4902]: E1202 14:16:45.106588 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:45 crc kubenswrapper[4902]: E1202 14:16:45.106683 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.147202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.147257 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.147273 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.147296 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.147311 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.249183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.249228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.249237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.249255 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.249265 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.351113 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.351148 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.351156 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.351169 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.351178 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.376637 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c95c0d-fa60-4166-beff-455723877688" containerID="1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc" exitCode=0 Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.376713 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerDied","Data":"1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.378947 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/0.log" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.381400 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.381517 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.392609 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc
35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.407015 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.422365 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.436546 4902 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.453353 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.453405 4902 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.453418 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.453436 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.453449 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.490714 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,
\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.505852 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.526305 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l"] Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.526313 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea553522
70a1cc6a743747d874861260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 
2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.527027 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.529300 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.529554 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.542778 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.543203 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc 
kubenswrapper[4902]: I1202 14:16:45.543265 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.543296 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.553114 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 
02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.555434 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.555470 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.555478 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.555492 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.555501 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.565097 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.579656 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"moun
tPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.594982 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 
14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.611777 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.629994 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.642337 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.643722 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.643786 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.643813 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.643853 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjpn7\" (UniqueName: \"kubernetes.io/projected/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-kube-api-access-hjpn7\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.644348 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.645229 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: 
\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.651414 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.652589 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.657008 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.657038 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.657048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.657064 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.657075 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.665675 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.676248 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.702086 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.714627 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.726461 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.741278 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.744411 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjpn7\" (UniqueName: \"kubernetes.io/projected/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-kube-api-access-hjpn7\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc 
kubenswrapper[4902]: I1202 14:16:45.753239 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.759767 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.759804 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.759816 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.759832 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.759841 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.764443 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.765086 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjpn7\" (UniqueName: \"kubernetes.io/projected/9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d-kube-api-access-hjpn7\") pod \"ovnkube-control-plane-749d76644c-rfj5l\" (UID: \"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.778675 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.796754 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.807217 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.823619 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38
df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 
2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.838287 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.848643 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.849694 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.862642 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:45Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.862831 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.862855 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.862864 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.862878 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 
14:16:45.862888 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:45 crc kubenswrapper[4902]: W1202 14:16:45.864892 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ddca97d_8b68_4f8f_93f3_3a7b94d76e4d.slice/crio-74a486c554c399198003d23e34320cd0206b3b515e7695944928591696b56db5 WatchSource:0}: Error finding container 74a486c554c399198003d23e34320cd0206b3b515e7695944928591696b56db5: Status 404 returned error can't find the container with id 74a486c554c399198003d23e34320cd0206b3b515e7695944928591696b56db5 Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.965109 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.965146 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.965154 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.965167 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:45 crc kubenswrapper[4902]: I1202 14:16:45.965177 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:45Z","lastTransitionTime":"2025-12-02T14:16:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.067542 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.067636 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.067661 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.067690 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.067710 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.170663 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.170716 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.170730 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.170747 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.170756 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.273969 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.274015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.274031 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.274051 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.274064 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.376280 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.376330 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.376345 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.376366 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.376382 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.387507 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" event={"ID":"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d","Type":"ContainerStarted","Data":"d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.387558 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" event={"ID":"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d","Type":"ContainerStarted","Data":"74a486c554c399198003d23e34320cd0206b3b515e7695944928591696b56db5"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.389625 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/1.log"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.390277 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/0.log"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.393428 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06" exitCode=1
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.393487 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.393530 4902 scope.go:117] "RemoveContainer" containerID="fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.394526 4902 scope.go:117] "RemoveContainer" containerID="b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06"
Dec 02 14:16:46 crc kubenswrapper[4902]: E1202 14:16:46.394896 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.398815 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" event={"ID":"e4c95c0d-fa60-4166-beff-455723877688","Type":"ContainerStarted","Data":"b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.407807 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.422167 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.435044 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.452643 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.469403 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.478501 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.478664 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.478739 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.478813 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.478898 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.485621 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.500489 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.512524 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.525373 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.540971 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.559839 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.572258 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.581429 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.581473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.581486 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.581504 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.581519 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.593384 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 
2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.612642 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.622662 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.626920 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-tlnwn"] Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.627597 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: E1202 14:16:46.627684 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.639673 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.653324 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 
2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.653855 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.653903 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjj4v\" (UniqueName: \"kubernetes.io/projected/b63b8b19-f855-4038-891d-6bfd1e5021de-kube-api-access-mjj4v\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.665694 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc 
kubenswrapper[4902]: I1202 14:16:46.683548 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.687055 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.687094 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.687108 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.687132 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.687146 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.698955 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z
\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.713794 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.733709 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.748421 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.754668 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.754697 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjj4v\" (UniqueName: \"kubernetes.io/projected/b63b8b19-f855-4038-891d-6bfd1e5021de-kube-api-access-mjj4v\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: E1202 14:16:46.754822 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:46 crc kubenswrapper[4902]: E1202 14:16:46.754893 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:16:47.254870481 +0000 UTC m=+38.446179180 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.760768 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.774272 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjj4v\" (UniqueName: \"kubernetes.io/projected/b63b8b19-f855-4038-891d-6bfd1e5021de-kube-api-access-mjj4v\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.779411 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.789191 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.789230 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.789239 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.789253 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.789266 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.790421 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.800917 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.811786 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.822896 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.837378 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.855700 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-p
od-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.870167 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.891542 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38
df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 
ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:46Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.892096 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.892126 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.892140 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.892161 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.892177 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.994343 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.994420 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.994439 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.994467 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:46 crc kubenswrapper[4902]: I1202 14:16:46.994487 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:46Z","lastTransitionTime":"2025-12-02T14:16:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.097931 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.097984 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.097998 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.098017 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.098029 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.106406 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.106471 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:47 crc kubenswrapper[4902]: E1202 14:16:47.106619 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.106635 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:47 crc kubenswrapper[4902]: E1202 14:16:47.106773 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:47 crc kubenswrapper[4902]: E1202 14:16:47.106872 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.200484 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.200528 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.200539 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.200556 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.200590 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.260494 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:47 crc kubenswrapper[4902]: E1202 14:16:47.260690 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:47 crc kubenswrapper[4902]: E1202 14:16:47.261096 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:16:48.261069389 +0000 UTC m=+39.452378138 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.302710 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.302745 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.302757 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.302775 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.302788 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.404689 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.404949 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.405026 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.405138 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.405218 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.405965 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/1.log" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.415051 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" event={"ID":"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d","Type":"ContainerStarted","Data":"27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.434969 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88
c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.448497 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.479639 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38
df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 
ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.499128 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.509460 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.509512 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.509533 4902 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.509556 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.509598 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.514338 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.529939 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.548894 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 
14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.561169 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.577188 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.591978 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.612969 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.613058 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.613083 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.613114 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.613135 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.613615 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.632325 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.649839 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.667627 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.680056 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.692390 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.714275 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:47Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.726899 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.726936 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.726946 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.726960 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.726969 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.829605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.829678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.829695 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.829719 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.829737 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.932724 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.932767 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.932779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.932795 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:47 crc kubenswrapper[4902]: I1202 14:16:47.932806 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:47Z","lastTransitionTime":"2025-12-02T14:16:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.035957 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.036019 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.036036 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.036060 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.036078 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.106096 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:16:48 crc kubenswrapper[4902]: E1202 14:16:48.106466 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.139963 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.140050 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.140067 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.140092 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.140109 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.254004 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.254139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.254159 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.254183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.254201 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.272198 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:16:48 crc kubenswrapper[4902]: E1202 14:16:48.272410 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 14:16:48 crc kubenswrapper[4902]: E1202 14:16:48.272538 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:16:50.272501849 +0000 UTC m=+41.463810608 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.356608 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.356665 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.356682 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.356708 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.356727 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.459407 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.459470 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.459487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.459511 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.459529 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.562658 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.562712 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.562723 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.562743 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.562760 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.741926 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.742219 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.742315 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.742395 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.742480 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.845444 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.845486 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.845495 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.845508 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.845517 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.948662 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.948719 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.948741 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.948770 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:48 crc kubenswrapper[4902]: I1202 14:16:48.948791 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:48Z","lastTransitionTime":"2025-12-02T14:16:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.051830 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.051906 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.051929 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.051959 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.051980 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.105778 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:49 crc kubenswrapper[4902]: E1202 14:16:49.105954 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.105991 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:49 crc kubenswrapper[4902]: E1202 14:16:49.106099 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.105994 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:49 crc kubenswrapper[4902]: E1202 14:16:49.106382 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.129077 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b1
62f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mount
Path\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce53
84eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.155479 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.155532 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.155542 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.155575 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.155587 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.161434 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.196499 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.215832 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.244125 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38
df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd38c1723fa13e2f1b3bfafb3337d99bea55352270a1cc6a743747d874861260\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"message\\\":\\\"s/externalversions/factory.go:140\\\\nI1202 14:16:43.820064 6168 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 14:16:43.820557 6168 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1202 14:16:43.820618 6168 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1202 14:16:43.820635 6168 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 14:16:43.820683 6168 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 14:16:43.820697 6168 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:43.820758 6168 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:43.820820 6168 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1202 14:16:43.820863 6168 factory.go:656] Stopping watch factory\\\\nI1202 14:16:43.820900 6168 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:43.820921 6168 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 14:16:43.820727 6168 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 14:16:43.820710 6168 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 14:16:43.820905 6168 handler.go:208] Removed *v1.Node event handler 2\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 
ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.258869 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.258908 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.258921 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.258941 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.258956 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.270288 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.283239 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.292060 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.304504 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.321183 4902 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.338061 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.356583 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.361271 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.361338 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.361356 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.361385 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.361403 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.370065 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.383863 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc 
kubenswrapper[4902]: I1202 14:16:49.400315 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.421937 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.434365 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:49Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.463300 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.463336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.463347 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.463366 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.463378 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.565927 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.565985 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.566003 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.566024 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.566042 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.668758 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.668867 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.668892 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.668926 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.668949 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.772274 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.772324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.772336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.772360 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.772375 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.875610 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.875666 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.875683 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.875711 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.875728 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.979854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.979934 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.979956 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.979989 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:49 crc kubenswrapper[4902]: I1202 14:16:49.980020 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:49Z","lastTransitionTime":"2025-12-02T14:16:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.083461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.083524 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.083536 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.083591 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.083606 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.105949 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:50 crc kubenswrapper[4902]: E1202 14:16:50.106196 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.187375 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.187428 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.187445 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.187471 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.187495 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.290803 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.290860 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.290875 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.290897 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.290908 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.316166 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:50 crc kubenswrapper[4902]: E1202 14:16:50.316406 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:50 crc kubenswrapper[4902]: E1202 14:16:50.316542 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:16:54.316511967 +0000 UTC m=+45.507820676 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.394475 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.394538 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.394554 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.394595 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.394613 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.497948 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.498023 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.498041 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.498075 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.498089 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.600795 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.600875 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.600899 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.600931 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.600955 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.703670 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.703724 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.703734 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.703752 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.703764 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.806958 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.807039 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.807078 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.807111 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.807133 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.911527 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.911603 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.911620 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.911644 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.911661 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.956878 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.956930 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.956947 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.956969 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.956986 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:50 crc kubenswrapper[4902]: E1202 14:16:50.974970 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:50Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.979748 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.979793 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.979805 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.979821 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:50 crc kubenswrapper[4902]: I1202 14:16:50.979835 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:50Z","lastTransitionTime":"2025-12-02T14:16:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.001089 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:50Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.006720 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.006768 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.006784 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.006808 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.006825 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.026473 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:51Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.031502 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.031575 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.031590 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.031608 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.031619 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.046367 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:51Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.050847 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.050947 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.050965 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.050988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.051006 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.064207 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:51Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.064363 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.065639 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.065672 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.065683 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.065697 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.065707 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.106406 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.106941 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.106615 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.106523 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.107430 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:51 crc kubenswrapper[4902]: E1202 14:16:51.107326 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.169093 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.169154 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.169173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.169201 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.169221 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.271473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.271530 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.271547 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.271601 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.271618 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.375037 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.375128 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.375147 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.375172 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.375190 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.478380 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.478438 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.478457 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.478481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.478501 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.582320 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.582386 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.582410 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.582441 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.582463 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.686269 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.686356 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.686384 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.686418 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.686443 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.789414 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.789481 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.789498 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.789528 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:51 crc kubenswrapper[4902]: I1202 14:16:51.789546 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:51Z","lastTransitionTime":"2025-12-02T14:16:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.149309 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:52 crc kubenswrapper[4902]: E1202 14:16:52.150338 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.152487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.155108 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.155140 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.155163 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.155180 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.258403 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.258744 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.258945 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.259091 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.259208 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.363220 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.363941 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.364056 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.364219 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.364340 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.466876 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.466914 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.466925 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.466946 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.466959 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.569926 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.569971 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.569986 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.570004 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.570015 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.673455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.673957 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.674126 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.674486 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.674716 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.777838 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.778125 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.778238 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.778329 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.778412 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.882159 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.882469 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.882787 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.883083 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.883228 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.985968 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.986030 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.986048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.986075 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:52 crc kubenswrapper[4902]: I1202 14:16:52.986093 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:52Z","lastTransitionTime":"2025-12-02T14:16:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.088366 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.088424 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.088443 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.088468 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.088486 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.106426 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.106431 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.106624 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:53 crc kubenswrapper[4902]: E1202 14:16:53.106927 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:53 crc kubenswrapper[4902]: E1202 14:16:53.107426 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:53 crc kubenswrapper[4902]: E1202 14:16:53.108698 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.191167 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.191553 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.191747 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.191896 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.192040 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.295617 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.295677 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.295694 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.295718 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.295735 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.398406 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.398473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.398494 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.398525 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.398544 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.501424 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.501462 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.501471 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.501486 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.501501 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.603956 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.604246 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.604318 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.604388 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.604459 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.707981 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.708464 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.708771 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.709011 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.709209 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.812402 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.812441 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.812449 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.812466 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.812477 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.915749 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.915823 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.915854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.915879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:53 crc kubenswrapper[4902]: I1202 14:16:53.915895 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:53Z","lastTransitionTime":"2025-12-02T14:16:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.018983 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.019045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.019063 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.019086 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.019103 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.106361 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:54 crc kubenswrapper[4902]: E1202 14:16:54.106495 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.122351 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.122437 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.122466 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.122498 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.122524 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.225834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.225916 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.225954 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.225987 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.226015 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.329432 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.329490 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.329506 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.329529 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.329546 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.381702 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:54 crc kubenswrapper[4902]: E1202 14:16:54.381902 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:54 crc kubenswrapper[4902]: E1202 14:16:54.382001 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:17:02.381977783 +0000 UTC m=+53.573286532 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.432988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.433066 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.433089 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.433127 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.433151 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.536094 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.536165 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.536200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.536228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.536247 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.639928 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.639975 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.639988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.640005 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.640018 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.742782 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.742846 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.742864 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.742887 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.742908 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.846214 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.846302 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.846326 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.846360 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.846382 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.949064 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.949411 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.949601 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.949777 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:54 crc kubenswrapper[4902]: I1202 14:16:54.950055 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:54Z","lastTransitionTime":"2025-12-02T14:16:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.052622 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.052919 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.053007 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.053121 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.053217 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.105923 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.106035 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:55 crc kubenswrapper[4902]: E1202 14:16:55.106111 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:16:55 crc kubenswrapper[4902]: E1202 14:16:55.106202 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.106617 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:55 crc kubenswrapper[4902]: E1202 14:16:55.106774 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.156028 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.156088 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.156106 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.156133 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.156153 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.258839 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.258895 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.258912 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.258938 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.258969 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.362461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.362541 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.362593 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.362619 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.362637 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.465443 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.465501 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.465520 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.465543 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.465560 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.568303 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.568357 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.568377 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.568399 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.568417 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.670299 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.670630 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.670719 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.670814 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.670897 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.773964 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.774003 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.774014 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.774032 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.774105 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.877181 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.877226 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.877239 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.877256 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.877267 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.979194 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.979246 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.979257 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.979275 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:55 crc kubenswrapper[4902]: I1202 14:16:55.979287 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:55Z","lastTransitionTime":"2025-12-02T14:16:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.082779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.082853 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.082870 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.082893 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.082911 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.106924 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:16:56 crc kubenswrapper[4902]: E1202 14:16:56.107157 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.186159 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.186236 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.186260 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.186291 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.186315 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.288010 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.288043 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.288051 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.288064 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.288074 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.390673 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.390733 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.390750 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.390774 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.390789 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.492975 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.493967 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.494189 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.494384 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.494600 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.597519 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.597605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.597628 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.597653 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.597678 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.700755 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.701123 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.701473 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.701896 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.702281 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.805454 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.805509 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.805528 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.805553 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.805722 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.908548 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.908648 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.908666 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.908690 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:56 crc kubenswrapper[4902]: I1202 14:16:56.908706 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:56Z","lastTransitionTime":"2025-12-02T14:16:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.011771 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.011838 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.011857 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.011881 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.011898 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.105685 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.105714 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.105738 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:16:57 crc kubenswrapper[4902]: E1202 14:16:57.105827 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:16:57 crc kubenswrapper[4902]: E1202 14:16:57.106448 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:16:57 crc kubenswrapper[4902]: E1202 14:16:57.106653 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.106805 4902 scope.go:117] "RemoveContainer" containerID="b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.113765 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.113833 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.113853 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.113877 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.113901 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.131232 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.149457 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.167864 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.184122 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.197984 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.211129 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.216686 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.216735 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.216748 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.216765 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.216777 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.227672 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.239984 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.251497 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z"
Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.263944 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.276024 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.297576 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.316759 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.318560 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.318661 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.318680 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.318704 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.318722 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.352349 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.371362 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.381223 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.393991 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.420980 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.421010 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.421018 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.421031 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.421040 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.452658 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/1.log" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.454846 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.454975 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.467883 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\
\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.479483 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 
14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.490799 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.504446 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523049 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523073 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523081 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523097 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523106 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.523644 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.536214 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.556938 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.570588 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.584040 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.594299 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.606088 4902 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.622154 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.632322 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.632362 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.632370 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.632382 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.632391 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.639733 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.655678 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.668277 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.677958 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.687029 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.735077 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.735120 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.735131 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.735147 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.735157 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.838001 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.838037 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.838045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.838059 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.838070 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.940894 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.940947 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.940959 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.940976 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:57 crc kubenswrapper[4902]: I1202 14:16:57.940988 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:57Z","lastTransitionTime":"2025-12-02T14:16:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.044321 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.044384 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.044403 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.044427 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.044447 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.106026 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:16:58 crc kubenswrapper[4902]: E1202 14:16:58.106495 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.146938 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.147274 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.147468 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.147681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.147825 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.250684 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.250735 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.250752 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.250774 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.250791 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.354123 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.354170 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.354183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.354202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.354212 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.456383 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.456420 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.456433 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.456449 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.456460 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.459962 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/2.log" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.460716 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/1.log" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.463655 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7" exitCode=1 Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.463677 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.463713 4902 scope.go:117] "RemoveContainer" containerID="b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.464391 4902 scope.go:117] "RemoveContainer" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7" Dec 02 14:16:58 crc kubenswrapper[4902]: E1202 14:16:58.464584 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.490147 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.511238 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.525033 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.544739 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.559192 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.559245 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.559253 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.559266 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.559275 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.566680 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.579349 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.598092 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881f
f81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod 
openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\"
:\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.611885 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.622212 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.632583 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.647372 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.665387 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.665436 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.665447 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.665463 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.665474 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.669361 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.684696 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.701005 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.717457 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.731969 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.742787 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:58Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.767727 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.767779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.767793 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.767815 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.767830 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.870533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.870621 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.870639 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.870663 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.870680 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.975641 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.975734 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.975757 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.975789 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:58 crc kubenswrapper[4902]: I1202 14:16:58.975808 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:58Z","lastTransitionTime":"2025-12-02T14:16:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.078836 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.078916 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.078943 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.078991 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.079014 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.106149 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.106279 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.106310 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.106347 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.106495 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.106552 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.129848 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"s
tartedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649
f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.132111 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.132189 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.132214 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.132234 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:16:59 crc 
kubenswrapper[4902]: I1202 14:16:59.132265 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132355 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132408 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132421 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132418 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132443 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:17:31.132418221 +0000 UTC m=+82.323726960 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132432 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132488 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:17:31.132474563 +0000 UTC m=+82.323783272 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132501 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:17:31.132496154 +0000 UTC m=+82.323804863 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132373 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132514 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132521 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132529 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:17:31.132510664 +0000 UTC m=+82.323819413 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:59 crc kubenswrapper[4902]: E1202 14:16:59.132547 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:17:31.132538785 +0000 UTC m=+82.323847494 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.142139 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.164285 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881f
f81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b347848024e89f69175544665b3b34ccb263ff38df7bb6250b7bdc85df904c06\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\" 6369 services_controller.go:360] Finished syncing service api on namespace openshift-apiserver for network=default : 1.468223ms\\\\nI1202 14:16:45.629239 6369 obj_retry.go:551] Creating *factory.egressNode crc took: 10.74682ms\\\\nI1202 14:16:45.629260 6369 factory.go:1336] Added *v1.Node event handler 7\\\\nI1202 14:16:45.629291 6369 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629301 6369 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 14:16:45.629307 6369 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 14:16:45.629325 6369 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 14:16:45.629340 6369 factory.go:656] Stopping watch factory\\\\nI1202 14:16:45.629351 6369 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 14:16:45.629350 6369 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 14:16:45.629376 6369 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 14:16:45.629507 6369 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1202 14:16:45.629594 6369 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1202 14:16:45.629636 6369 ovnkube.go:599] Stopped ovnkube\\\\nI1202 14:16:45.629663 6369 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:16:45.629736 6369 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod 
openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\"
:\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.179334 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.181012 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.181035 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.181045 4902 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.181060 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.181071 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.190771 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.200359 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.212941 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.225117 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.236266 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.248426 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.262116 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.276168 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.283152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.283189 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.283197 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.283212 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.283224 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.287436 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.304358 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.320881 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.333431 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.354961 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:59Z is after 2025-08-24T17:21:41Z" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.386143 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.386205 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.386217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.386234 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.386245 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.471051 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/2.log" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.488706 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.488767 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.488783 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.488805 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.488823 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.592175 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.592256 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.592278 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.592305 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.592326 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.695741 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.695812 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.695829 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.696259 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.696321 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.799544 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.799633 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.799650 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.799672 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.799689 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.902985 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.903330 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.903462 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.903731 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:16:59 crc kubenswrapper[4902]: I1202 14:16:59.903899 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:16:59Z","lastTransitionTime":"2025-12-02T14:16:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.007354 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.007455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.007472 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.007495 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.007510 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.053909 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.055259 4902 scope.go:117] "RemoveContainer" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7" Dec 02 14:17:00 crc kubenswrapper[4902]: E1202 14:17:00.055517 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.071679 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is 
after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.096058 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.106169 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:00 crc kubenswrapper[4902]: E1202 14:17:00.106468 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.110869 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.110958 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.110984 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.111019 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.111043 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.114944 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.135777 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.154296 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.172749 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 
14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.189095 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.206875 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.214534 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.214622 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.214641 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.214666 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.214687 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.227527 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.246526 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.272266 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.291882 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.307261 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.317850 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.318109 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.318203 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.318286 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.318377 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.321384 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.350380 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} 
options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.376219 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.393003 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.421490 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.421537 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.421554 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.421616 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.421639 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.525285 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.525348 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.525368 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.525393 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.525412 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.628344 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.628395 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.628408 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.628431 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.628443 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.731253 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.731347 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.731373 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.731407 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.731434 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.734374 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.747642 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.776516 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.803105 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.833864 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.833943 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.833964 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.833996 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.834018 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.838754 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.856614 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.868938 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.881868 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.903321 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.919402 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.936425 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.937599 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.937643 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.937659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.937681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.937697 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:00Z","lastTransitionTime":"2025-12-02T14:17:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.951807 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.968498 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.984315 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:00 crc kubenswrapper[4902]: I1202 14:17:00.993444 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:00Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.005027 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.022248 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.037514 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.040337 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.040421 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.040446 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.040475 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.040497 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.058072 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.106278 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.106409 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.106553 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.106697 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.106739 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.106914 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.143265 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.143312 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.143324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.143346 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.143359 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.250346 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.250431 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.250477 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.250520 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.250549 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.283055 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.283157 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.283177 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.283205 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.283223 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.304103 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.309243 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.309479 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.309508 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.309603 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.309628 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.325725 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.330712 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.330771 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.330822 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.330858 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.330882 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.345872 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.350053 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.350117 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.350135 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.350162 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.350181 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.367339 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.372139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.372200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.372218 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.372247 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.372266 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.387823 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:01Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:01 crc kubenswrapper[4902]: E1202 14:17:01.387952 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.389834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.389904 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.389934 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.389966 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.389990 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.492508 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.492592 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.492614 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.492641 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.492662 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.596324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.596368 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.596380 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.596402 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.596415 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.699087 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.699164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.699183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.699212 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.699231 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.802463 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.802521 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.802541 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.802602 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.802624 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.905665 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.905705 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.905714 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.905728 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:01 crc kubenswrapper[4902]: I1202 14:17:01.905739 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:01Z","lastTransitionTime":"2025-12-02T14:17:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.008861 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.008900 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.008908 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.008925 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.008934 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.106081 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:02 crc kubenswrapper[4902]: E1202 14:17:02.106208 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.110681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.110702 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.110710 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.110721 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.110730 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.214116 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.214430 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.214599 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.214756 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.214930 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.318065 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.318115 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.318125 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.318143 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.318154 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.420511 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.420826 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.420915 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.421005 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.421087 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.471143 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:02 crc kubenswrapper[4902]: E1202 14:17:02.471246 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:17:02 crc kubenswrapper[4902]: E1202 14:17:02.471491 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:17:18.47147452 +0000 UTC m=+69.662783229 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.523008 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.523048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.523057 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.523073 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.523083 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.625305 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.625343 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.625353 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.625368 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.625377 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.727797 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.727841 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.727852 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.727869 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.727894 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.831033 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.831107 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.831131 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.831162 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.831182 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.934202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.934247 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.934260 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.934277 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:02 crc kubenswrapper[4902]: I1202 14:17:02.934290 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:02Z","lastTransitionTime":"2025-12-02T14:17:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.036273 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.036324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.036334 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.036351 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.036362 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.106230 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.106265 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.106347 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:03 crc kubenswrapper[4902]: E1202 14:17:03.106461 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:03 crc kubenswrapper[4902]: E1202 14:17:03.106677 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:03 crc kubenswrapper[4902]: E1202 14:17:03.106798 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.139650 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.139684 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.139696 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.139714 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.139726 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.242733 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.242781 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.242798 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.242820 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.242838 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.344691 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.345104 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.345290 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.345482 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.345707 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.449085 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.449138 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.449149 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.449164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.449174 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.552670 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.552985 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.553095 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.553254 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.553370 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.656149 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.656200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.656211 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.656228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.656236 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.759677 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.759754 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.759779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.759809 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.759830 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.861629 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.861675 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.861686 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.861704 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:03 crc kubenswrapper[4902]: I1202 14:17:03.861716 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:03Z","lastTransitionTime":"2025-12-02T14:17:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:04 crc kubenswrapper[4902]: I1202 14:17:04.106441 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:04 crc kubenswrapper[4902]: E1202 14:17:04.106626 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:05 crc kubenswrapper[4902]: I1202 14:17:05.106064 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:05 crc kubenswrapper[4902]: I1202 14:17:05.106158 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:05 crc kubenswrapper[4902]: E1202 14:17:05.106250 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:05 crc kubenswrapper[4902]: I1202 14:17:05.106160 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:05 crc kubenswrapper[4902]: E1202 14:17:05.106372 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:05 crc kubenswrapper[4902]: E1202 14:17:05.106462 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:06 crc kubenswrapper[4902]: I1202 14:17:06.106396 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:06 crc kubenswrapper[4902]: E1202 14:17:06.106614 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:07 crc kubenswrapper[4902]: I1202 14:17:07.106559 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:07 crc kubenswrapper[4902]: I1202 14:17:07.106638 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:07 crc kubenswrapper[4902]: I1202 14:17:07.106656 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:07 crc kubenswrapper[4902]: E1202 14:17:07.106862 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:07 crc kubenswrapper[4902]: E1202 14:17:07.106963 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:07 crc kubenswrapper[4902]: E1202 14:17:07.107071 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.105808 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:08 crc kubenswrapper[4902]: E1202 14:17:08.106610 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Has your network provider started?"} Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.954599 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.954750 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.954794 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.954826 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:08 crc kubenswrapper[4902]: I1202 14:17:08.954849 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:08Z","lastTransitionTime":"2025-12-02T14:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.057755 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.057795 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.057810 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.057829 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.057842 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.106186 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.106357 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.106544 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:09 crc kubenswrapper[4902]: E1202 14:17:09.106901 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:09 crc kubenswrapper[4902]: E1202 14:17:09.106724 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:09 crc kubenswrapper[4902]: E1202 14:17:09.106548 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.146186 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.162162 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.162321 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.162347 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.162419 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.162446 4902 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.172260 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.205035 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881f
f81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.222852 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.222852 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.237506 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.250453 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.265266 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.265291 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.265299 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.265313 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.265324 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.270223 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.281709 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.307337 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.335879 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.348715 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.360015 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.367539 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.367784 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.367893 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.368001 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.368133 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.370812 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.382975 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.393217 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.401948 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.414185 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:09Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.469816 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.469854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.469864 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.469883 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.469896 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.571888 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.571933 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.571946 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.571960 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.571970 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.675634 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.675697 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.675716 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.675739 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.676364 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.780006 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.780065 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.780083 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.780108 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.780125 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.882653 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.882713 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.882729 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.882757 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.882775 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.986399 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.986445 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.986461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.986483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:09 crc kubenswrapper[4902]: I1202 14:17:09.986500 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:09Z","lastTransitionTime":"2025-12-02T14:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.089789 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.089891 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.089915 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.089942 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.089962 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.106365 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:10 crc kubenswrapper[4902]: E1202 14:17:10.106598 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.193451 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.194139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.194386 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.194692 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.195045 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.298354 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.298390 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.298401 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.298418 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.298429 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.401037 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.401121 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.401136 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.401155 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.401168 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.504369 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.504789 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.505043 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.505279 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.505971 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.609644 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.610017 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.610152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.610290 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.610403 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.713592 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.713995 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.714210 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.714448 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.714727 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.818246 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.818302 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.818324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.818353 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.818374 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.921534 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.921633 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.921678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.921703 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:10 crc kubenswrapper[4902]: I1202 14:17:10.921721 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:10Z","lastTransitionTime":"2025-12-02T14:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.024294 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.024349 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.024360 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.024374 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.024407 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.108437 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.108747 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.108963 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.108952 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.109274 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.109623 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.126988 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.127034 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.127047 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.127066 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.127077 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.230152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.230195 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.230214 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.230238 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.230259 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.334770 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.335202 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.335357 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.335542 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.335754 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.439515 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.439583 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.439595 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.439615 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.439630 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.453544 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.453598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.453611 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.453626 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.453639 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.478180 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:11Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.487120 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.487453 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.487611 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.487786 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.487938 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.515789 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:11Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.524373 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.524488 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.524549 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.524644 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.524710 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.549085 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:11Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.555055 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.555121 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.555134 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.555171 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.555186 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.571028 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:11Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.576384 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.576429 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.576439 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.576461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.576473 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.599675 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:11Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:11 crc kubenswrapper[4902]: E1202 14:17:11.599975 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.602269 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.602325 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.602365 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.602394 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.602428 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.705867 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.705917 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.705925 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.705940 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.705949 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.812069 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.812098 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.812107 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.812136 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.812147 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.914412 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.914498 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.914540 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.914604 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:11 crc kubenswrapper[4902]: I1202 14:17:11.914625 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:11Z","lastTransitionTime":"2025-12-02T14:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.017330 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.017714 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.017942 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.018167 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.018378 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.107464 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.108526 4902 scope.go:117] "RemoveContainer" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7" Dec 02 14:17:12 crc kubenswrapper[4902]: E1202 14:17:12.108905 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:17:12 crc kubenswrapper[4902]: E1202 14:17:12.109126 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.121148 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.121185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.121194 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.121207 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.121216 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.224091 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.224153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.224167 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.224185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.224198 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.326739 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.326784 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.326799 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.326817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.326828 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.429114 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.429184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.429198 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.429213 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.429223 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.531482 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.531534 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.531547 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.531581 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.531596 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.634411 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.634485 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.634509 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.634538 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.634614 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.737110 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.737153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.737165 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.737181 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.737193 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.839399 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.839428 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.839437 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.839451 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.839461 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.944485 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.944540 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.944552 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.944590 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:12 crc kubenswrapper[4902]: I1202 14:17:12.944603 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:12Z","lastTransitionTime":"2025-12-02T14:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.047169 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.047207 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.047217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.047235 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.047245 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.106884 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.106940 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:13 crc kubenswrapper[4902]: E1202 14:17:13.106996 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:13 crc kubenswrapper[4902]: E1202 14:17:13.107091 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.107206 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:13 crc kubenswrapper[4902]: E1202 14:17:13.107324 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.149483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.149512 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.149521 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.149535 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.149545 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.252624 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.252674 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.252688 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.252718 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.252730 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.354745 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.354915 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.354930 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.354947 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.354958 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.457870 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.457909 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.457933 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.457948 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.457957 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.559976 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.560010 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.560030 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.560047 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.560059 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.662801 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.662842 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.662854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.662870 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.662881 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.765040 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.765071 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.765079 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.765092 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.765101 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.869194 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.869226 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.869237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.869252 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.869262 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.971979 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.972048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.972066 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.972090 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:13 crc kubenswrapper[4902]: I1202 14:17:13.972108 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:13Z","lastTransitionTime":"2025-12-02T14:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.074235 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.074271 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.074280 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.074295 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.074303 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:14Z","lastTransitionTime":"2025-12-02T14:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.105897 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:14 crc kubenswrapper[4902]: E1202 14:17:14.106003 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.176996 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.177037 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.177045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.177060 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.177070 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:14Z","lastTransitionTime":"2025-12-02T14:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.279784 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.279835 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.279847 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.279862 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.279873 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:14Z","lastTransitionTime":"2025-12-02T14:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.382558 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.382650 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.382670 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.382698 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:14 crc kubenswrapper[4902]: I1202 14:17:14.382720 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:14Z","lastTransitionTime":"2025-12-02T14:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
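[Note] The condition printed by setters.go:603 has the shape of a core/v1 NodeCondition. A standalone sketch that parses one of these payloads follows; the real type lives in k8s.io/api/core/v1 and uses metav1.Time, while plain time.Time is used here only to keep the example dependency-free.

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    // NodeCondition mirrors the field names in the condition JSON above.
    type NodeCondition struct {
    	Type               string    `json:"type"`
    	Status             string    `json:"status"`
    	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
    	LastTransitionTime time.Time `json:"lastTransitionTime"`
    	Reason             string    `json:"reason"`
    	Message            string    `json:"message"`
    }

    func main() {
    	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:14Z","lastTransitionTime":"2025-12-02T14:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
    	var c NodeCondition
    	if err := json.Unmarshal([]byte(raw), &c); err != nil {
    		panic(err)
    	}
    	fmt.Printf("Ready=%s reason=%s since %s\n", c.Status, c.Reason, c.LastTransitionTime.Format(time.RFC3339))
    }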
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.000485 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.000528 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.000536 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.000550 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.000587 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:15Z","lastTransitionTime":"2025-12-02T14:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.103525 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.103614 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.103634 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.103657 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.103724 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:15Z","lastTransitionTime":"2025-12-02T14:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.106892 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.106961 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:15 crc kubenswrapper[4902]: E1202 14:17:15.107101 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:15 crc kubenswrapper[4902]: I1202 14:17:15.107169 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:15 crc kubenswrapper[4902]: E1202 14:17:15.107341 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:15 crc kubenswrapper[4902]: E1202 14:17:15.107474 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
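[Note] Each of these pods is retried on a later sync pass (the same three diagnostics pods reappear at 14:17:17 and 14:17:19, roughly every two seconds) and each retry is skipped at the same gate: a pod on the cluster network cannot get a sandbox while NetworkReady is false. A hedged sketch of that gate, not the real pod_workers code:

    package main

    import "fmt"

    // canStartPodSandbox loosely mirrors the gate visible in these logs:
    // sandbox creation for a cluster-network pod is refused while the runtime
    // reports NetworkReady=false. Host-network pods are exempt, which is why
    // static control-plane pods keep running elsewhere in this same log.
    func canStartPodSandbox(networkReady, hostNetwork bool) error {
    	if hostNetwork {
    		return nil
    	}
    	if !networkReady {
    		return fmt.Errorf("network is not ready: container runtime network not ready: NetworkReady=false")
    	}
    	return nil
    }

    func main() {
    	fmt.Println(canStartPodSandbox(false, false)) // the case in this log: sync skipped
    	fmt.Println(canStartPodSandbox(false, true))  // host-network pod: allowed (<nil>)
    }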
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.029152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.029225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.029248 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.029280 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.029300 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:16Z","lastTransitionTime":"2025-12-02T14:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:16 crc kubenswrapper[4902]: I1202 14:17:16.106078 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:16 crc kubenswrapper[4902]: E1202 14:17:16.106208 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.105610 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.105649 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:17 crc kubenswrapper[4902]: E1202 14:17:17.105732 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:17 crc kubenswrapper[4902]: E1202 14:17:17.105905 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.105949 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:17 crc kubenswrapper[4902]: E1202 14:17:17.106031 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.120345 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.158344 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.158413 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.158429 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.158450 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:17 crc kubenswrapper[4902]: I1202 14:17:17.158468 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:17Z","lastTransitionTime":"2025-12-02T14:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
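[Note] The "SyncLoop ADD" line is a different code path from the errors around it: kubelet's sync loop merges pod updates from its config sources (file for static manifests, http, and api for the apiserver watch) and dispatches them by operation, and source="api" says this ADD arrived from the apiserver. The sketch below illustrates that merge-and-dispatch pattern with made-up types, not kubelet's real ones.

    package main

    import "fmt"

    // PodUpdate is an illustrative stand-in for kubelet's pod update events.
    type PodUpdate struct {
    	Op     string   // "ADD", "UPDATE", "REMOVE", ...
    	Source string   // "file", "http", "api"
    	Pods   []string // pod keys, e.g. "openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
    }

    // syncLoop drains the merged update channel and would hand each pod to a worker.
    func syncLoop(updates <-chan PodUpdate) {
    	for u := range updates {
    		fmt.Printf("SyncLoop %s source=%q pods=%v\n", u.Op, u.Source, u.Pods)
    	}
    }

    func main() {
    	ch := make(chan PodUpdate, 1)
    	ch <- PodUpdate{Op: "ADD", Source: "api", Pods: []string{"openshift-machine-config-operator/kube-rbac-proxy-crio-crc"}}
    	close(ch)
    	syncLoop(ch)
    }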
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.106531 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:18 crc kubenswrapper[4902]: E1202 14:17:18.106959 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.188687 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.188717 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.188728 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.188745 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.188755 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:18Z","lastTransitionTime":"2025-12-02T14:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:18 crc kubenswrapper[4902]: I1202 14:17:18.546082 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:18 crc kubenswrapper[4902]: E1202 14:17:18.546220 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 14:17:18 crc kubenswrapper[4902]: E1202 14:17:18.546263 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:17:50.546250512 +0000 UTC m=+101.737559211 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered
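[Note] The underlying error reads as kubelet's secret manager not having the pod's secrets registered yet, so the mount keeps failing until the pod is registered and the secret becomes retrievable. The 32s in "durationBeforeRetry 32s" is the signature of exponential backoff in the volume manager's nestedpendingoperations: each consecutive failure roughly doubles the wait up to a cap, and 0.5s doubled six times is 32s. The initial value and cap below are assumptions for illustration, not constants read from this log.

    package main

    import (
    	"fmt"
    	"time"
    )

    // backoff returns the wait after the given number of consecutive failures,
    // doubling from an initial delay and saturating at a maximum.
    func backoff(initial, max time.Duration, failures int) time.Duration {
    	d := initial
    	for i := 1; i < failures; i++ {
    		d *= 2
    		if d >= max {
    			return max
    		}
    	}
    	return d
    }

    func main() {
    	// 0.5s -> 1s -> 2s -> 4s -> 8s -> 16s -> 32s: a 32s wait is consistent
    	// with roughly the seventh straight failure under these assumed values.
    	for n := 1; n <= 8; n++ {
    		fmt.Printf("failure %d -> wait %v\n", n, backoff(500*time.Millisecond, 32*time.Second, n))
    	}
    }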
Has your network provider started?"} Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.106408 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:19 crc kubenswrapper[4902]: E1202 14:17:19.106765 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.106867 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:19 crc kubenswrapper[4902]: E1202 14:17:19.107072 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.107263 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:19 crc kubenswrapper[4902]: E1202 14:17:19.107416 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.115382 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.115443 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.115470 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.115500 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.115519 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.127685 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.147497 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.162376 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.178194 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.192278 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"
2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.217623 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\
\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/v
ar/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.218122 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.218173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.218185 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.218204 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.218216 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.229544 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.261070 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.272918 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.289953 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.301877 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.312518 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.320357 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.320391 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.320400 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.320414 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.320425 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.328376 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.344149 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.355499 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.365647 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.379551 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.389535 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.399466 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:19Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.423442 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.423642 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.423935 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.424070 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.424219 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.528024 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.528369 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.528533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.528732 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.528909 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.631787 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.631863 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.631891 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.631922 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.631945 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.735141 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.735192 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.735209 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.735229 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.735244 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.838406 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.838467 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.838485 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.838511 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.838530 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.940927 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.941249 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.941379 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.941529 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:19 crc kubenswrapper[4902]: I1202 14:17:19.941683 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:19Z","lastTransitionTime":"2025-12-02T14:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.045178 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.045255 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.045282 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.045314 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.045336 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.106025 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:20 crc kubenswrapper[4902]: E1202 14:17:20.106284 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.148364 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.148434 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.148455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.148486 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.148509 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.250733 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.250786 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.250799 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.250821 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.250838 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.353703 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.353743 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.353753 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.353768 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.353777 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.456653 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.456681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.456690 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.456704 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.456732 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.559300 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.559345 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.559359 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.559385 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.559406 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.662368 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.662411 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.662420 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.662437 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.662446 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.765535 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.765605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.765614 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.765630 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.765644 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.868182 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.868216 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.868225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.868239 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.868250 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.970700 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.970739 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.970764 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.970779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:20 crc kubenswrapper[4902]: I1202 14:17:20.970789 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:20Z","lastTransitionTime":"2025-12-02T14:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.073015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.073319 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.073412 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.073499 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.073601 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.106808 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.106971 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.107005 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.107071 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.107156 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.107230 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.176644 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.176696 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.176709 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.176727 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.176740 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.279477 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.279948 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.279961 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.279980 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.279993 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.382279 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.382354 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.382373 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.382399 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.382418 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.485170 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.485227 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.485237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.485252 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.485263 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.587743 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.587822 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.587846 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.587879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.587903 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.690525 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.690589 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.690601 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.690618 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.690630 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.793259 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.793321 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.793336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.793357 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.793373 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.819249 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.819297 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.819318 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.819336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.819347 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.836443 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:21Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.841678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.841745 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.841762 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.841791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.841807 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.856456 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:21Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.860160 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.860212 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.860229 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.860254 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.860269 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.880164 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:21Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.884427 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.884477 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.884489 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.884506 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.884520 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.896656 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:21Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.900709 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.900802 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.900817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.900868 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.900885 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.911882 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:21Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:21 crc kubenswrapper[4902]: E1202 14:17:21.912073 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.913623 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.913649 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.913659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.913674 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:21 crc kubenswrapper[4902]: I1202 14:17:21.913686 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:21Z","lastTransitionTime":"2025-12-02T14:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.015822 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.015868 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.015878 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.015894 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.015905 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.106391 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:22 crc kubenswrapper[4902]: E1202 14:17:22.106555 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.118208 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.118258 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.118271 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.118287 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.118300 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.220419 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.220491 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.220510 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.220537 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.220555 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.323405 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.323451 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.323462 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.323478 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.323490 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.426533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.426591 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.426605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.426622 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.426635 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.528997 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.529045 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.529056 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.529073 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.529084 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.563782 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/0.log" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.563846 4902 generic.go:334] "Generic (PLEG): container finished" podID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" containerID="644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5" exitCode=1 Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.563882 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerDied","Data":"644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.564330 4902 scope.go:117] "RemoveContainer" containerID="644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.589834 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.606218 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.621652 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.632281 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.632312 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.632321 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.632334 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.632343 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.642201 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.658902 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.672050 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.685507 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"2025-12-02T14:16:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af\\\\n2025-12-02T14:16:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af to /host/opt/cni/bin/\\\\n2025-12-02T14:16:36Z [verbose] multus-daemon 
started\\\\n2025-12-02T14:16:36Z [verbose] Readiness Indicator file check\\\\n2025-12-02T14:17:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.699957 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.714509 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.729116 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.734428 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.734811 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.734832 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.734851 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.734863 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.752189 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.763694 4902 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.792855 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.811999 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z"
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.836865 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.836906 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.836917 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.836936 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.836948 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.846689 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.865855 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.880283 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.890861 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.905513 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:22Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.939278 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.939312 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.939323 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.939339 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:22 crc kubenswrapper[4902]: I1202 14:17:22.939351 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:22Z","lastTransitionTime":"2025-12-02T14:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.041929 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.041962 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.041975 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.041990 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.042001 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.107145 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:23 crc kubenswrapper[4902]: E1202 14:17:23.107439 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.107520 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.107243 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:23 crc kubenswrapper[4902]: E1202 14:17:23.107758 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:23 crc kubenswrapper[4902]: E1202 14:17:23.107998 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.144687 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.144852 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.145334 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.145359 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.145371 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.247621 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.247658 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.247669 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.247684 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.247693 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.350890 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.350953 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.350971 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.350997 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.351015 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.454421 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.454468 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.454483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.454501 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.454513 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.556629 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.556663 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.556677 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.556696 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.556708 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.568055 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/0.log" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.568113 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerStarted","Data":"2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.578187 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.596382 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2
f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.608512 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.625586 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881f
f81c171a95c5728c9d21e6f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.643475 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.655102 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.658954 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.658990 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.658999 4902 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.659013 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.659023 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.665677 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.678032 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.694903 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"2025-12-02T14:16:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af\\\\n2025-12-02T14:16:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af to /host/opt/cni/bin/\\\\n2025-12-02T14:16:36Z [verbose] multus-daemon started\\\\n2025-12-02T14:16:36Z [verbose] Readiness Indicator file check\\\\n2025-12-02T14:17:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.710033 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 
14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.723449 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.739225 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.759897 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.761767 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.761848 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.761900 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.761928 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.761947 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.773283 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.788305 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.804850 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.819390 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.832656 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.854206 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:23Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.865650 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.865723 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.865751 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.865784 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.865809 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.969072 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.969144 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.969164 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.969190 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:23 crc kubenswrapper[4902]: I1202 14:17:23.969208 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:23Z","lastTransitionTime":"2025-12-02T14:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.072868 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.072924 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.072941 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.072963 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.072979 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:24Z","lastTransitionTime":"2025-12-02T14:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.106351 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:24 crc kubenswrapper[4902]: E1202 14:17:24.106624 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.184538 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.184815 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.184834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.184859 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:24 crc kubenswrapper[4902]: I1202 14:17:24.184877 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:24Z","lastTransitionTime":"2025-12-02T14:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:25 crc kubenswrapper[4902]: I1202 14:17:25.105949 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:25 crc kubenswrapper[4902]: I1202 14:17:25.105993 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:25 crc kubenswrapper[4902]: I1202 14:17:25.105977 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:25 crc kubenswrapper[4902]: E1202 14:17:25.106209 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:25 crc kubenswrapper[4902]: E1202 14:17:25.106408 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:25 crc kubenswrapper[4902]: E1202 14:17:25.106819 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:26 crc kubenswrapper[4902]: I1202 14:17:26.105674 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:26 crc kubenswrapper[4902]: E1202 14:17:26.105914 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:27 crc kubenswrapper[4902]: I1202 14:17:27.106010 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:27 crc kubenswrapper[4902]: I1202 14:17:27.106034 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:27 crc kubenswrapper[4902]: I1202 14:17:27.106514 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:27 crc kubenswrapper[4902]: E1202 14:17:27.106736 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:27 crc kubenswrapper[4902]: E1202 14:17:27.106876 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:27 crc kubenswrapper[4902]: E1202 14:17:27.107065 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:27 crc kubenswrapper[4902]: I1202 14:17:27.107146 4902 scope.go:117] "RemoveContainer" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7"
Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.105498 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:28 crc kubenswrapper[4902]: E1202 14:17:28.105673 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.388662 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.388705 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.388721 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.388743 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.388760 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.493176 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.493214 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.493225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.493242 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.493252 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595077 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595134 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595152 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595176 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595194 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.595478 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/2.log" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.598870 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.599395 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.615547 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.640186 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4
d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.658837 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.693814 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc39d64420c6c04731d61578640792935717b23b
5302c2643b4fc3ef1c22813d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} 
options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.698184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.698241 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.698258 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.698282 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.698304 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.710785 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.730179 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.746654 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.762523 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.785303 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.801734 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.801804 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.801829 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.801862 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.801885 4902 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.804950 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.819054 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.837186 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.852498 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"2025-12-02T14:16:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af\\\\n2025-12-02T14:16:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af to /host/opt/cni/bin/\\\\n2025-12-02T14:16:36Z [verbose] multus-daemon started\\\\n2025-12-02T14:16:36Z [verbose] Readiness Indicator file check\\\\n2025-12-02T14:17:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.866602 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 
14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.882204 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.897723 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.903835 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.903879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.903891 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.903909 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.903921 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:28Z","lastTransitionTime":"2025-12-02T14:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.912758 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.928270 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:28 crc kubenswrapper[4902]: I1202 14:17:28.944333 4902 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:28Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.007462 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.007732 4902 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.007855 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.007953 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.008041 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.105816 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.105875 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.105911 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:29 crc kubenswrapper[4902]: E1202 14:17:29.105989 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:29 crc kubenswrapper[4902]: E1202 14:17:29.106182 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:29 crc kubenswrapper[4902]: E1202 14:17:29.106236 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.110972 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.111037 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.111054 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.111083 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.111101 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.119937 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.157170 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb
68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc6600186db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.173071 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.204186 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc39d64420c6c04731d61578640792935717b23b
5302c2643b4fc3ef1c22813d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} 
options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.213631 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.213676 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.213689 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.213710 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.213731 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.217083 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.230196 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.239746 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.256110 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.269948 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.283849 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.297969 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.312464 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.316729 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.316779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.316791 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.316811 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.316823 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.329382 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"2025-12-02T14:16:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af\\\\n2025-12-02T14:16:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af to /host/opt/cni/bin/\\\\n2025-12-02T14:16:36Z [verbose] multus-daemon started\\\\n2025-12-02T14:16:36Z [verbose] Readiness Indicator file check\\\\n2025-12-02T14:17:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.344135 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 
14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.360040 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.378850 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.398345 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.413446 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.419654 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.419697 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.419707 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.419723 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.419737 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.433420 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:29Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.523097 4902 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.523170 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.523197 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.523230 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.523255 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.626478 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.626533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.626557 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.626621 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.626644 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.729133 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.729758 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.729834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.729979 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.730051 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.832937 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.832996 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.833015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.833040 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.833072 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.936225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.936285 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.936298 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.936321 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:29 crc kubenswrapper[4902]: I1202 14:17:29.936334 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:29Z","lastTransitionTime":"2025-12-02T14:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.039703 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.039752 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.039761 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.039778 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.039789 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.105814 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:30 crc kubenswrapper[4902]: E1202 14:17:30.106081 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.143687 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.143767 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.143786 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.143816 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.143838 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.247118 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.247178 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.247190 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.247216 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.247236 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.351479 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.351550 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.351610 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.351637 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.351658 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.499747 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.499829 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.499857 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.499892 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.499916 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.603116 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.603169 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.603183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.603201 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.603214 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.607043 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/3.log" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.608321 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/2.log" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.611470 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" exitCode=1 Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.611516 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.611553 4902 scope.go:117] "RemoveContainer" containerID="56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.612734 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:17:30 crc kubenswrapper[4902]: E1202 14:17:30.613004 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.633344 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3062d831-55a3-4a9b-8eef-246c4ea5f9b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2bfd4f0f73119258643a647e8ee258ad4c08360b2c365a41d6de9966e2f56dc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cd96368a989fdbf63188113b3df3433eb65bb3bbb1930e29f408bc2c8dc0a25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.657433 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34f23499-8642-46e4-abc5-2d44a5202c98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fe54806c7a8738845142e7cfe0e6e9856e47ee62f7eabd85e65ecc59b117d59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://117ccce91ac0b2ba3d9d7d037abc39f6240ff02d319132f665eced80b6932dc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fcc5e5bda13aa88c4534131a96c1d67e7b34fec315c035a8dfdaf9a38ca963f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c0eeeb392aff227f5a65de3b612772fc660018
6db95d1d1756d12e2a615c494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3646824a95dcb0a68292fd24957ca72454f6e8a106a784dde0602ff49b6e0e4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1d1ced5f213fb50effc2cd4958f753917c136214c5731e62a0f8746eb47cdcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e66bc47154f3a649f4c63c4d7d015b28eda657c19cea8baaf50bb589cb2bbf5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2db6919885229ad8f073589d093758d6bb8228a620927c425638f03444dee22e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.671718 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.691713 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9379a49e-d66c-4224-a564-f00d4cadd9ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc39d64420c6c04731d61578640792935717b23b
5302c2643b4fc3ef1c22813d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56b11b2a994a03558c03ae86a72b7c9e60f0881ff81c171a95c5728c9d21e6f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:16:58Z\\\",\\\"message\\\":\\\"vent Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:16:57Z is after 2025-08-24T17:21:41Z]\\\\nI1202 14:16:57.928738 6569 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 14:16:57.928191 6569 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-qhrkh\\\\nI1202 14:16:57.928743 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:29Z\\\",\\\"message\\\":\\\"\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.150\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 14:17:28.540546 6947 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 14:17:28.540637 6947 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:17:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-l
og\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6g4l5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-q55jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.704408 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ca28c98-d546-4859-bac7-2fed9414d34d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cfafb80a72413884fdfc5e2da797d7d49b920ec54d292e8047453423f1148b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9a316e8c2c059475ace40728063bbcaf73c55837036254e0989f1fc6ae35fb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://006e342754cff26c4bd073a07cbe4e59fcecdee9623d669a9866b6e838455fab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b2695615759014cb44016d8d96614277027fe12f9b14768d3916d7e2bb3198\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.706254 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.706326 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.706347 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.706372 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.706391 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.724085 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12a1ddef66553905c8575d53834cea28584b6391d83b78800889d367ec623c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18fa37dfabc2da5e65a5768dd3b0468bb9f09de25d1f80ea99f7b1036dd8bbb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.737822 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qhrkh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66d4b493-68d5-4caa-b053-eb1858dd41d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d137f3f555312275fbe2b826f493db3beff10c8af91fe290d37c5497aa0ab8a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ph468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qhrkh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.753661 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-8tnr5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e841e0-3d80-41c7-b03b-672d129caddd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4e42f6c6ca3c270e034a24dc63338a2199fbcb1820b015aa52b5d19a5bee737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xrh2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:35Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-8tnr5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.773156 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81d41cbd-844a-417a-908f-1c7861a696cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"-endpoints-signer@1764684971\\\\\\\\\\\\\\\" (2025-12-02 14:16:10 +0000 UTC to 2026-01-01 14:16:11 +0000 UTC (now=2025-12-02 14:16:27.2057598 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205892 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764684981\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764684981\\\\\\\\\\\\\\\" (2025-12-02 13:16:21 +0000 UTC to 2026-12-02 13:16:21 +0000 UTC (now=2025-12-02 14:16:27.205875594 +0000 UTC))\\\\\\\"\\\\nI1202 14:16:27.205908 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 14:16:27.205926 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 14:16:27.205943 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205958 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1202 14:16:27.205981 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1798125142/tls.crt::/tmp/serving-cert-1798125142/tls.key\\\\\\\"\\\\nI1202 14:16:27.206072 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 14:16:27.206714 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206746 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1202 14:16:27.206810 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 14:16:27.206831 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1202 14:16:27.206845 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1202 14:16:27.206926 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.793689 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a8c056163313a8272aa794d229727cd9ea8616762afb83fe207410547874a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.809319 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.809487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.809509 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.809536 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.809555 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.816096 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://929cba7d70f2e4af2a2f339b8d7ba2338c99b36ef735dd52cc5468abc082cc8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.834643 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.881375 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vm9q6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"381fdb33-d71e-468a-9b1e-a2920c32f8ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T14:17:21Z\\\",\\\"message\\\":\\\"2025-12-02T14:16:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af\\\\n2025-12-02T14:16:36+00:00 [cnibincopy] 
Successfully moved files in /host/opt/cni/bin/upgrade_b72a5d22-1577-4124-bdac-4493d23111af to /host/opt/cni/bin/\\\\n2025-12-02T14:16:36Z [verbose] multus-daemon started\\\\n2025-12-02T14:16:36Z [verbose] Readiness Indicator file check\\\\n2025-12-02T14:17:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:17:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pjr87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vm9q6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.903963 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ddca97d-8b68-4f8f-93f3-3a7b94d76e4d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5216879f4136b5eb3bdcddee747c8cf4a94c94d658c3a39e4c4cb2b5af8712c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27df890a16ad475acb76958fd8dd7d3892eb0813ddce26eb5dc862703acf2e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjpn7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfj5l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 
14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.912310 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.912367 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.912376 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.912390 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.912399 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:30Z","lastTransitionTime":"2025-12-02T14:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.918601 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b63b8b19-f855-4038-891d-6bfd1e5021de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mjj4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tlnwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.930745 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73bd077f-0c52-4033-84e4-e57749720961\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09aaea5c575f0884610e1fe3414a8b0fe7429ad441f2b1c20b97663939939e46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad56d5e859495ac5f3ea004e4158866ae9f6a6dba1980cacbc9c15bbd2c25c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d45c59e3a56b2917f75a3a646eaa7d2ae0443cb29f2a4a4472a60011c97f4ca7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.941084 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:27Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.950498 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c78ba9cf-533f-4683-8531-045256a5d819\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b3acca9f678f03192a292627a813e5327dc451571c7754fd56cfd9fdd95bff6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wxbj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-v8znh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:30 crc kubenswrapper[4902]: I1202 14:17:30.964969 4902 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jm696" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4c95c0d-fa60-4166-beff-455723877688\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T14:16:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b57e5637e52800e6cd0798e7ea84c3dbd6d92b11e7991882f7989b926645ac7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T14:16:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b1d458fa9c9737539314ae0d13964b41d59038abe0fab6cad102e9d058e1403a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85b8b400f0bb25225f85a5d0fbb2c8eb65600894647a563be69bb8d185c02448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://867bdbb9a2f747680183487b6066977d0d5667abb488b489172aecbde66566a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2155a9fb4380553165ff303941adffe57da02dd095dd884602a37a2e16c2dc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7c41b1dc6ee26a66b3fdc5e94af0c71ab05ee82962bec04bdf7f7cce80138b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7fcd7a37b1ace723a3a001e3be94f99e433d14b9590f182e78abce5384eabc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T14:16:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T14:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sq8gs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T14:16:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jm696\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:30Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.015136 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.015200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.015223 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.015251 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.015272 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.106409 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.106497 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.106637 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.106690 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.106832 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.107027 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.117751 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.117792 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.117810 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.117832 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.117849 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.214200 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.214350 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214408 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:35.214368542 +0000 UTC m=+146.405677281 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214479 4902 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.214494 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.214597 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214638 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:18:35.214599168 +0000 UTC m=+146.405907917 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.214708 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214792 4902 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214936 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 14:18:35.214903867 +0000 UTC m=+146.406212576 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214960 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214986 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.214998 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.215005 4902 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.215025 4902 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.215045 4902 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.215074 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 14:18:35.215058921 +0000 UTC m=+146.406367660 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:17:31 crc kubenswrapper[4902]: E1202 14:17:31.215100 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 14:18:35.215084702 +0000 UTC m=+146.406393451 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.220726 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.220876 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.220903 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.220936 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.220961 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.325726 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.325808 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.325831 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.325862 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.325885 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.428455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.428536 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.428606 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.428642 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.428668 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.532363 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.532423 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.532438 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.532462 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.532480 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.617105 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/3.log" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.635759 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.636029 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.636051 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.636079 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.636100 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.739250 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.739330 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.739347 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.739373 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.739390 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.842290 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.842371 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.842390 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.842416 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.842434 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.945086 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.945139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.945157 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.945184 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:31 crc kubenswrapper[4902]: I1202 14:17:31.945202 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:31Z","lastTransitionTime":"2025-12-02T14:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.047692 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.047831 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.047912 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.047950 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.048018 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.105874 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.106042 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.151450 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.151533 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.151602 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.151635 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.151659 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.254921 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.254996 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.255017 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.255047 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.255069 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.298839 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.298926 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.298943 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.298970 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.298988 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.314831 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.320141 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.320174 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.320183 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.320201 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.320212 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.335110 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.339649 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.339678 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.339686 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.339700 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.339710 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.357475 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.362079 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.362352 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.362617 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.362825 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.363012 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.381079 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.386206 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.386259 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.386308 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.386351 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.386443 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.408519 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T14:17:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6bdcc9b0-fdff-4d27-a2f4-57188241bda3\\\",\\\"systemUUID\\\":\\\"acc9633f-b885-4291-bd1d-c1f58994796b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T14:17:32Z is after 2025-08-24T17:21:41Z" Dec 02 14:17:32 crc kubenswrapper[4902]: E1202 14:17:32.408881 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.411065 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.411134 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.411158 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.411186 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.411209 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.514257 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.514322 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.514381 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.514408 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.514431 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.617589 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.617919 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.618063 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.618228 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:32 crc kubenswrapper[4902]: I1202 14:17:32.618355 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:32Z","lastTransitionTime":"2025-12-02T14:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.105714 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.105902 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:33 crc kubenswrapper[4902]: E1202 14:17:33.106155 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.106183 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:33 crc kubenswrapper[4902]: E1202 14:17:33.106279 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:33 crc kubenswrapper[4902]: E1202 14:17:33.106342 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.135144 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.135217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.135233 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.135258 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.135276 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.238680 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.238742 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.238765 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.238797 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.238817 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.341823 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.341874 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.341889 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.341912 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.341928 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.444622 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.444708 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.444742 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.444771 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.444792 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.547869 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.548311 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.548329 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.548356 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.548376 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.650943 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.651052 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.651070 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.651095 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.651113 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.754641 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.754719 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.754742 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.754774 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.754796 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.857680 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.857740 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.857758 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.857783 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:33 crc kubenswrapper[4902]: I1202 14:17:33.857802 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:33Z","lastTransitionTime":"2025-12-02T14:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.106186 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:34 crc kubenswrapper[4902]: E1202 14:17:34.106454 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.166899 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.166947 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.166958 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.166978 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.166989 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.276497 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.276599 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.276627 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.276653 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.276673 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.379637 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.379709 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.379721 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.379738 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.379752 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.482623 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.482681 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.482697 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.482721 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.482739 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.586531 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.586618 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.586639 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.586669 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.586690 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.688868 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.688957 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.688979 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.689010 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.689035 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.791816 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.791889 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.791912 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.791940 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.791959 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.895977 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.896032 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.896049 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.896074 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:34 crc kubenswrapper[4902]: I1202 14:17:34.896091 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:34Z","lastTransitionTime":"2025-12-02T14:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.105941 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.105963 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.105939 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:35 crc kubenswrapper[4902]: E1202 14:17:35.106039 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:35 crc kubenswrapper[4902]: E1202 14:17:35.106194 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:35 crc kubenswrapper[4902]: E1202 14:17:35.106302 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.206006 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.206100 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.206117 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.206173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.206191 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:35Z","lastTransitionTime":"2025-12-02T14:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.310216 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.310274 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.310291 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.310316 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:35 crc kubenswrapper[4902]: I1202 14:17:35.310334 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:35Z","lastTransitionTime":"2025-12-02T14:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:36 crc kubenswrapper[4902]: I1202 14:17:36.106393 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:36 crc kubenswrapper[4902]: E1202 14:17:36.106737 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.106342 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.106366 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.106443 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:37 crc kubenswrapper[4902]: E1202 14:17:37.107264 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:37 crc kubenswrapper[4902]: E1202 14:17:37.107484 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:37 crc kubenswrapper[4902]: E1202 14:17:37.107655 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.165487 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.165541 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.165555 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.165605 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.165618 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.268811 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.268897 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.268920 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.268960 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.268986 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.372492 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.372555 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.372598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.372627 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.372643 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.475835 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.475895 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.475913 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.475936 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.475955 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.579324 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.579401 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.579427 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.579463 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.579488 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.682381 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.682442 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.682463 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.682489 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.682506 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.785082 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.785157 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.785174 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.785199 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.785216 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.887779 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.887843 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.887865 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.887893 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.887916 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.991157 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.991225 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.991262 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.991300 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:37 crc kubenswrapper[4902]: I1202 14:17:37.991324 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:37Z","lastTransitionTime":"2025-12-02T14:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.094609 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.094690 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.094710 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.094733 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.094749 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.106326 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:38 crc kubenswrapper[4902]: E1202 14:17:38.106516 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.197458 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.197598 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.197621 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.197659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.197680 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.300519 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.300795 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.300925 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.301018 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.301100 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.403757 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.403817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.403834 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.403859 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.403877 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.507172 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.507237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.507266 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.507296 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.507320 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.611153 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.611659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.611918 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.612149 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.612362 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.716002 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.716059 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.716075 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.716098 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.716116 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.819833 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.819910 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.819933 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.819957 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.819977 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.922020 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.922071 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.922081 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.922096 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:38 crc kubenswrapper[4902]: I1202 14:17:38.922105 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:38Z","lastTransitionTime":"2025-12-02T14:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.024973 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.025042 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.025125 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.025159 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.025180 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.105904 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.105970 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.105931 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:17:39 crc kubenswrapper[4902]: E1202 14:17:39.106764 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 14:17:39 crc kubenswrapper[4902]: E1202 14:17:39.106917 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 14:17:39 crc kubenswrapper[4902]: E1202 14:17:39.107100 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.129471 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.129512 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.129523 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.129541 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.129552 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.141187 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=39.141166787 podStartE2EDuration="39.141166787s" podCreationTimestamp="2025-12-02 14:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.141071045 +0000 UTC m=+90.332379784" watchObservedRunningTime="2025-12-02 14:17:39.141166787 +0000 UTC m=+90.332475506" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.179817 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-qhrkh" podStartSLOduration=68.179786466 podStartE2EDuration="1m8.179786466s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.179329603 +0000 UTC m=+90.370638372" watchObservedRunningTime="2025-12-02 14:17:39.179786466 +0000 UTC m=+90.371095205" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.213465 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-8tnr5" podStartSLOduration=68.213432394 podStartE2EDuration="1m8.213432394s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.195084657 +0000 UTC m=+90.386393406" watchObservedRunningTime="2025-12-02 14:17:39.213432394 +0000 UTC m=+90.404741153" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.213778 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vm9q6" podStartSLOduration=68.213764763 podStartE2EDuration="1m8.213764763s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.213083844 +0000 UTC m=+90.404392543" watchObservedRunningTime="2025-12-02 14:17:39.213764763 +0000 UTC 
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.228591 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfj5l" podStartSLOduration=67.22855669 podStartE2EDuration="1m7.22855669s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.228463907 +0000 UTC m=+90.419772626" watchObservedRunningTime="2025-12-02 14:17:39.22855669 +0000 UTC m=+90.419865399"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.233892 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.233931 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.233941 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.233956 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.233969 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.257399 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.257373182 podStartE2EDuration="1m12.257373182s" podCreationTimestamp="2025-12-02 14:16:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.256464426 +0000 UTC m=+90.447773155" watchObservedRunningTime="2025-12-02 14:17:39.257373182 +0000 UTC m=+90.448681891"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.324185 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.324168694 podStartE2EDuration="1m12.324168694s" podCreationTimestamp="2025-12-02 14:16:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.32401841 +0000 UTC m=+90.515327129" watchObservedRunningTime="2025-12-02 14:17:39.324168694 +0000 UTC m=+90.515477403"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.336097 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.336136 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.336147 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.336166 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.336178 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.369191 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podStartSLOduration=68.369169673 podStartE2EDuration="1m8.369169673s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.347233594 +0000 UTC m=+90.538542313" watchObservedRunningTime="2025-12-02 14:17:39.369169673 +0000 UTC m=+90.560478382"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.369417 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-jm696" podStartSLOduration=68.369410039 podStartE2EDuration="1m8.369410039s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.368750961 +0000 UTC m=+90.560059670" watchObservedRunningTime="2025-12-02 14:17:39.369410039 +0000 UTC m=+90.560718748"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.380645 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.380625265 podStartE2EDuration="22.380625265s" podCreationTimestamp="2025-12-02 14:17:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.379959097 +0000 UTC m=+90.571267806" watchObservedRunningTime="2025-12-02 14:17:39.380625265 +0000 UTC m=+90.571933974"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.405389 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.405372883 podStartE2EDuration="1m8.405372883s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:39.404611651 +0000 UTC m=+90.595920360" watchObservedRunningTime="2025-12-02 14:17:39.405372883 +0000 UTC m=+90.596681592"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.438071 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.438124 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.438139 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.438158 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
for node" node="crc" event="NodeNotReady" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.438170 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.541230 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.541302 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.541316 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.541339 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.541353 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.644479 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.644542 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.644600 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.644626 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.644646 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.747470 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.747895 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.748048 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.748198 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.748336 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.852112 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.852161 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.852178 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.852200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.852216 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.954408 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.954474 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.954520 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.954549 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:39 crc kubenswrapper[4902]: I1202 14:17:39.954602 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:39Z","lastTransitionTime":"2025-12-02T14:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.057400 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.058617 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.058811 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.058979 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.059127 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.105732 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:17:40 crc kubenswrapper[4902]: E1202 14:17:40.105879 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.162110 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.162175 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.162194 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.162217 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.162234 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.264483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.264817 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.264835 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.264850 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.264861 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.368478 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.368538 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.368556 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.368614 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.368631 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.471826 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.472138 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.472305 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.472453 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.472620 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.575852 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.575895 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.575911 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.575938 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.575955 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.679255 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.679369 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.679395 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.679424 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.679444 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.782336 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.782455 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.782483 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.782513 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.782534 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.886461 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.886547 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.886611 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.886639 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.886680 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.991792 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.991854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.991870 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.991894 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:40 crc kubenswrapper[4902]: I1202 14:17:40.991912 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:40Z","lastTransitionTime":"2025-12-02T14:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.095281 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.096310 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.096507 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.096741 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.096926 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.105670 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.105717 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:41 crc kubenswrapper[4902]: E1202 14:17:41.106208 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.105747 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:41 crc kubenswrapper[4902]: E1202 14:17:41.106319 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:41 crc kubenswrapper[4902]: E1202 14:17:41.106620 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.201237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.201300 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.201320 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.201344 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.201387 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.304720 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.304800 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.304823 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.304854 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.304878 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.409304 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.409359 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.409376 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.409401 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.409419 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.512015 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.512077 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.512101 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.512131 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.512152 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.615761 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.615824 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.615849 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.615879 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.615899 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.721054 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.721364 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.721501 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.721669 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.721802 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.824449 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.824756 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.824890 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.824973 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.825035 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.927339 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.927381 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.927390 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.927403 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:41 crc kubenswrapper[4902]: I1202 14:17:41.927412 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:41Z","lastTransitionTime":"2025-12-02T14:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.030173 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.030221 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.030237 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.030260 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.030279 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.105549 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:42 crc kubenswrapper[4902]: E1202 14:17:42.105728 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.151005 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.151055 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.151072 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.151096 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.151116 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.253419 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.253476 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.253492 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.253515 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.253532 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.356659 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.356735 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.356762 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.356792 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.356817 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.459286 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.459342 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.459360 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.459382 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.459399 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.562496 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.562557 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.562615 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.562641 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.562658 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.567109 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.567174 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.567200 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.567226 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.567248 4902 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T14:17:42Z","lastTransitionTime":"2025-12-02T14:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.619336 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh"] Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.619795 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.622846 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.623239 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.623623 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.624230 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.653525 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.653804 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.653943 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-service-ca\") pod 
\"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.654044 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.654144 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755294 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755360 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755418 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755448 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.755644 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: 
\"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.756306 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.757435 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.766995 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.785132 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e88f328-8753-4b5a-919a-0f6e4d02bbf6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-d7bzh\" (UID: \"6e88f328-8753-4b5a-919a-0f6e4d02bbf6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:42 crc kubenswrapper[4902]: I1202 14:17:42.934461 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" Dec 02 14:17:43 crc kubenswrapper[4902]: I1202 14:17:43.106453 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:43 crc kubenswrapper[4902]: I1202 14:17:43.106860 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:43 crc kubenswrapper[4902]: I1202 14:17:43.106884 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:43 crc kubenswrapper[4902]: E1202 14:17:43.108210 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:43 crc kubenswrapper[4902]: E1202 14:17:43.108384 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:43 crc kubenswrapper[4902]: E1202 14:17:43.108538 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:43 crc kubenswrapper[4902]: I1202 14:17:43.670807 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" event={"ID":"6e88f328-8753-4b5a-919a-0f6e4d02bbf6","Type":"ContainerStarted","Data":"cd1e9e138625bd63caf502b6bf3ebb8bce05755a12b9dfb65ef0e6a587fdab0c"} Dec 02 14:17:44 crc kubenswrapper[4902]: I1202 14:17:44.106210 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:44 crc kubenswrapper[4902]: E1202 14:17:44.106398 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:44 crc kubenswrapper[4902]: I1202 14:17:44.107224 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:17:44 crc kubenswrapper[4902]: E1202 14:17:44.107416 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:17:44 crc kubenswrapper[4902]: I1202 14:17:44.674456 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" event={"ID":"6e88f328-8753-4b5a-919a-0f6e4d02bbf6","Type":"ContainerStarted","Data":"56c72313ea1cb91514ca5e7266e6bc06ba3e3632c40c162c37b8703c179ce1ef"} Dec 02 14:17:44 crc kubenswrapper[4902]: I1202 14:17:44.691451 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-d7bzh" podStartSLOduration=73.691419448 podStartE2EDuration="1m13.691419448s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:17:44.690531793 +0000 UTC m=+95.881840512" watchObservedRunningTime="2025-12-02 14:17:44.691419448 +0000 UTC m=+95.882728197" Dec 02 14:17:45 crc kubenswrapper[4902]: I1202 14:17:45.106485 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:45 crc kubenswrapper[4902]: I1202 14:17:45.106539 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:45 crc kubenswrapper[4902]: I1202 14:17:45.106633 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:45 crc kubenswrapper[4902]: E1202 14:17:45.106951 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:45 crc kubenswrapper[4902]: E1202 14:17:45.107039 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:45 crc kubenswrapper[4902]: E1202 14:17:45.107203 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:46 crc kubenswrapper[4902]: I1202 14:17:46.105615 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:46 crc kubenswrapper[4902]: E1202 14:17:46.105874 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:47 crc kubenswrapper[4902]: I1202 14:17:47.106116 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:47 crc kubenswrapper[4902]: E1202 14:17:47.106312 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:47 crc kubenswrapper[4902]: I1202 14:17:47.106380 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:47 crc kubenswrapper[4902]: E1202 14:17:47.106636 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:47 crc kubenswrapper[4902]: I1202 14:17:47.106809 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:47 crc kubenswrapper[4902]: E1202 14:17:47.106958 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:48 crc kubenswrapper[4902]: I1202 14:17:48.106326 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:48 crc kubenswrapper[4902]: E1202 14:17:48.106469 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:49 crc kubenswrapper[4902]: I1202 14:17:49.106183 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:49 crc kubenswrapper[4902]: I1202 14:17:49.106229 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:49 crc kubenswrapper[4902]: I1202 14:17:49.106289 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:49 crc kubenswrapper[4902]: E1202 14:17:49.107641 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:49 crc kubenswrapper[4902]: E1202 14:17:49.107790 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:49 crc kubenswrapper[4902]: E1202 14:17:49.108230 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:50 crc kubenswrapper[4902]: I1202 14:17:50.106327 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:50 crc kubenswrapper[4902]: E1202 14:17:50.106950 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:50 crc kubenswrapper[4902]: I1202 14:17:50.647449 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:50 crc kubenswrapper[4902]: E1202 14:17:50.647687 4902 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:17:50 crc kubenswrapper[4902]: E1202 14:17:50.647795 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs podName:b63b8b19-f855-4038-891d-6bfd1e5021de nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.647769734 +0000 UTC m=+165.839078533 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs") pod "network-metrics-daemon-tlnwn" (UID: "b63b8b19-f855-4038-891d-6bfd1e5021de") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 14:17:51 crc kubenswrapper[4902]: I1202 14:17:51.106018 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:51 crc kubenswrapper[4902]: I1202 14:17:51.106091 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:51 crc kubenswrapper[4902]: I1202 14:17:51.106239 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:51 crc kubenswrapper[4902]: E1202 14:17:51.106230 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:51 crc kubenswrapper[4902]: E1202 14:17:51.106476 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:51 crc kubenswrapper[4902]: E1202 14:17:51.106650 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:52 crc kubenswrapper[4902]: I1202 14:17:52.106527 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:52 crc kubenswrapper[4902]: E1202 14:17:52.106711 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:53 crc kubenswrapper[4902]: I1202 14:17:53.106459 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:53 crc kubenswrapper[4902]: I1202 14:17:53.106497 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:53 crc kubenswrapper[4902]: E1202 14:17:53.107060 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:53 crc kubenswrapper[4902]: I1202 14:17:53.106506 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:53 crc kubenswrapper[4902]: E1202 14:17:53.107105 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:53 crc kubenswrapper[4902]: E1202 14:17:53.107166 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:54 crc kubenswrapper[4902]: I1202 14:17:54.106231 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:54 crc kubenswrapper[4902]: E1202 14:17:54.106449 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:55 crc kubenswrapper[4902]: I1202 14:17:55.106202 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:55 crc kubenswrapper[4902]: I1202 14:17:55.106599 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:55 crc kubenswrapper[4902]: E1202 14:17:55.106626 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:55 crc kubenswrapper[4902]: E1202 14:17:55.106812 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:55 crc kubenswrapper[4902]: I1202 14:17:55.106928 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:55 crc kubenswrapper[4902]: E1202 14:17:55.107134 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:56 crc kubenswrapper[4902]: I1202 14:17:56.106188 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:56 crc kubenswrapper[4902]: E1202 14:17:56.106477 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:57 crc kubenswrapper[4902]: I1202 14:17:57.105820 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:57 crc kubenswrapper[4902]: I1202 14:17:57.105873 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:57 crc kubenswrapper[4902]: I1202 14:17:57.105895 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:57 crc kubenswrapper[4902]: E1202 14:17:57.106018 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:57 crc kubenswrapper[4902]: E1202 14:17:57.106139 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:57 crc kubenswrapper[4902]: E1202 14:17:57.106269 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:17:57 crc kubenswrapper[4902]: I1202 14:17:57.107456 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:17:57 crc kubenswrapper[4902]: E1202 14:17:57.107773 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-q55jp_openshift-ovn-kubernetes(9379a49e-d66c-4224-a564-f00d4cadd9ea)\"" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" Dec 02 14:17:58 crc kubenswrapper[4902]: I1202 14:17:58.106472 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:17:58 crc kubenswrapper[4902]: E1202 14:17:58.106657 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:17:59 crc kubenswrapper[4902]: I1202 14:17:59.106434 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:17:59 crc kubenswrapper[4902]: E1202 14:17:59.108234 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:17:59 crc kubenswrapper[4902]: I1202 14:17:59.108278 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:17:59 crc kubenswrapper[4902]: E1202 14:17:59.108463 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:17:59 crc kubenswrapper[4902]: I1202 14:17:59.108329 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:17:59 crc kubenswrapper[4902]: E1202 14:17:59.108626 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:00 crc kubenswrapper[4902]: I1202 14:18:00.106296 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:00 crc kubenswrapper[4902]: E1202 14:18:00.106464 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:01 crc kubenswrapper[4902]: I1202 14:18:01.105912 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:01 crc kubenswrapper[4902]: I1202 14:18:01.105981 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:01 crc kubenswrapper[4902]: I1202 14:18:01.106054 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:01 crc kubenswrapper[4902]: E1202 14:18:01.106056 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:01 crc kubenswrapper[4902]: E1202 14:18:01.106154 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:01 crc kubenswrapper[4902]: E1202 14:18:01.106223 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:02 crc kubenswrapper[4902]: I1202 14:18:02.105619 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:02 crc kubenswrapper[4902]: E1202 14:18:02.106527 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:03 crc kubenswrapper[4902]: I1202 14:18:03.106477 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:03 crc kubenswrapper[4902]: I1202 14:18:03.106505 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:03 crc kubenswrapper[4902]: E1202 14:18:03.106752 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:03 crc kubenswrapper[4902]: I1202 14:18:03.106772 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:03 crc kubenswrapper[4902]: E1202 14:18:03.106862 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:03 crc kubenswrapper[4902]: E1202 14:18:03.106927 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:04 crc kubenswrapper[4902]: I1202 14:18:04.106494 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:04 crc kubenswrapper[4902]: E1202 14:18:04.106664 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:05 crc kubenswrapper[4902]: I1202 14:18:05.106744 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:05 crc kubenswrapper[4902]: I1202 14:18:05.106781 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:05 crc kubenswrapper[4902]: I1202 14:18:05.106871 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:05 crc kubenswrapper[4902]: E1202 14:18:05.106984 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:05 crc kubenswrapper[4902]: E1202 14:18:05.107074 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:05 crc kubenswrapper[4902]: E1202 14:18:05.107144 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:06 crc kubenswrapper[4902]: I1202 14:18:06.105610 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:06 crc kubenswrapper[4902]: E1202 14:18:06.105770 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:07 crc kubenswrapper[4902]: I1202 14:18:07.106313 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:07 crc kubenswrapper[4902]: I1202 14:18:07.106310 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:07 crc kubenswrapper[4902]: E1202 14:18:07.106513 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:07 crc kubenswrapper[4902]: E1202 14:18:07.106713 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:07 crc kubenswrapper[4902]: I1202 14:18:07.107273 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:07 crc kubenswrapper[4902]: E1202 14:18:07.107434 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.106118 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:08 crc kubenswrapper[4902]: E1202 14:18:08.106239 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.757743 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/1.log" Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.758677 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/0.log" Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.758736 4902 generic.go:334] "Generic (PLEG): container finished" podID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" containerID="2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b" exitCode=1 Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.758769 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerDied","Data":"2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b"} Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.758812 4902 scope.go:117] "RemoveContainer" containerID="644f8ab5b76cb430fa9267f37265271d5832b20aec62d40bc60598d71e0613b5" Dec 02 14:18:08 crc kubenswrapper[4902]: I1202 14:18:08.759109 4902 scope.go:117] "RemoveContainer" containerID="2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b" Dec 02 14:18:08 crc kubenswrapper[4902]: E1202 14:18:08.759295 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vm9q6_openshift-multus(381fdb33-d71e-468a-9b1e-a2920c32f8ae)\"" pod="openshift-multus/multus-vm9q6" podUID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" Dec 02 14:18:09 crc kubenswrapper[4902]: I1202 14:18:09.105758 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:09 crc kubenswrapper[4902]: I1202 14:18:09.105814 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:09 crc kubenswrapper[4902]: E1202 14:18:09.105891 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:09 crc kubenswrapper[4902]: E1202 14:18:09.108813 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:09 crc kubenswrapper[4902]: I1202 14:18:09.108885 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:09 crc kubenswrapper[4902]: E1202 14:18:09.109091 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:09 crc kubenswrapper[4902]: E1202 14:18:09.129810 4902 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 02 14:18:09 crc kubenswrapper[4902]: E1202 14:18:09.194106 4902 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 14:18:09 crc kubenswrapper[4902]: I1202 14:18:09.764662 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/1.log" Dec 02 14:18:10 crc kubenswrapper[4902]: I1202 14:18:10.106399 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:10 crc kubenswrapper[4902]: E1202 14:18:10.106620 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:11 crc kubenswrapper[4902]: I1202 14:18:11.105685 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:11 crc kubenswrapper[4902]: E1202 14:18:11.105872 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:11 crc kubenswrapper[4902]: I1202 14:18:11.105892 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:11 crc kubenswrapper[4902]: I1202 14:18:11.105944 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:11 crc kubenswrapper[4902]: E1202 14:18:11.106283 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:11 crc kubenswrapper[4902]: E1202 14:18:11.106329 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:12 crc kubenswrapper[4902]: I1202 14:18:12.106051 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:12 crc kubenswrapper[4902]: E1202 14:18:12.106240 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:12 crc kubenswrapper[4902]: I1202 14:18:12.107525 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.105927 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.105976 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:13 crc kubenswrapper[4902]: E1202 14:18:13.106409 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:13 crc kubenswrapper[4902]: E1202 14:18:13.106596 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.106095 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:13 crc kubenswrapper[4902]: E1202 14:18:13.106728 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.780529 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/3.log" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.783481 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerStarted","Data":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.784714 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.822283 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podStartSLOduration=102.822262014 podStartE2EDuration="1m42.822262014s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:13.821593776 +0000 UTC m=+125.012902485" watchObservedRunningTime="2025-12-02 14:18:13.822262014 +0000 UTC m=+125.013570753" Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.934236 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tlnwn"] Dec 02 14:18:13 crc kubenswrapper[4902]: I1202 14:18:13.934460 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:13 crc kubenswrapper[4902]: E1202 14:18:13.934685 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:14 crc kubenswrapper[4902]: E1202 14:18:14.195703 4902 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 14:18:15 crc kubenswrapper[4902]: I1202 14:18:15.105910 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:15 crc kubenswrapper[4902]: E1202 14:18:15.106397 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:15 crc kubenswrapper[4902]: I1202 14:18:15.105998 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:15 crc kubenswrapper[4902]: I1202 14:18:15.105998 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:15 crc kubenswrapper[4902]: E1202 14:18:15.106521 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:15 crc kubenswrapper[4902]: E1202 14:18:15.106690 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:16 crc kubenswrapper[4902]: I1202 14:18:16.106428 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:16 crc kubenswrapper[4902]: E1202 14:18:16.106696 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:17 crc kubenswrapper[4902]: I1202 14:18:17.105906 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:17 crc kubenswrapper[4902]: I1202 14:18:17.105975 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:17 crc kubenswrapper[4902]: E1202 14:18:17.106132 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:17 crc kubenswrapper[4902]: E1202 14:18:17.106296 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:17 crc kubenswrapper[4902]: I1202 14:18:17.106331 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:17 crc kubenswrapper[4902]: E1202 14:18:17.106521 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:18 crc kubenswrapper[4902]: I1202 14:18:18.105955 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:18 crc kubenswrapper[4902]: E1202 14:18:18.106168 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:19 crc kubenswrapper[4902]: I1202 14:18:19.107901 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:19 crc kubenswrapper[4902]: I1202 14:18:19.107996 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:19 crc kubenswrapper[4902]: I1202 14:18:19.108083 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:19 crc kubenswrapper[4902]: E1202 14:18:19.108137 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:19 crc kubenswrapper[4902]: E1202 14:18:19.108375 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:19 crc kubenswrapper[4902]: E1202 14:18:19.108429 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:19 crc kubenswrapper[4902]: E1202 14:18:19.196410 4902 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 14:18:20 crc kubenswrapper[4902]: I1202 14:18:20.106341 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:20 crc kubenswrapper[4902]: E1202 14:18:20.106512 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:21 crc kubenswrapper[4902]: I1202 14:18:21.106450 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:21 crc kubenswrapper[4902]: I1202 14:18:21.106501 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:21 crc kubenswrapper[4902]: I1202 14:18:21.106450 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:21 crc kubenswrapper[4902]: E1202 14:18:21.106606 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:21 crc kubenswrapper[4902]: E1202 14:18:21.106777 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:21 crc kubenswrapper[4902]: E1202 14:18:21.106826 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:22 crc kubenswrapper[4902]: I1202 14:18:22.106331 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:22 crc kubenswrapper[4902]: E1202 14:18:22.106894 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:22 crc kubenswrapper[4902]: I1202 14:18:22.106926 4902 scope.go:117] "RemoveContainer" containerID="2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b" Dec 02 14:18:23 crc kubenswrapper[4902]: I1202 14:18:23.105636 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:23 crc kubenswrapper[4902]: I1202 14:18:23.105678 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:23 crc kubenswrapper[4902]: E1202 14:18:23.106549 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:23 crc kubenswrapper[4902]: I1202 14:18:23.105741 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:23 crc kubenswrapper[4902]: E1202 14:18:23.106698 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:23 crc kubenswrapper[4902]: E1202 14:18:23.107056 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:24 crc kubenswrapper[4902]: I1202 14:18:24.106068 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:24 crc kubenswrapper[4902]: E1202 14:18:24.106224 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:24 crc kubenswrapper[4902]: E1202 14:18:24.198122 4902 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 14:18:24 crc kubenswrapper[4902]: I1202 14:18:24.826643 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/1.log" Dec 02 14:18:24 crc kubenswrapper[4902]: I1202 14:18:24.826693 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerStarted","Data":"fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292"} Dec 02 14:18:25 crc kubenswrapper[4902]: I1202 14:18:25.106391 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:25 crc kubenswrapper[4902]: I1202 14:18:25.106455 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:25 crc kubenswrapper[4902]: E1202 14:18:25.106669 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:25 crc kubenswrapper[4902]: I1202 14:18:25.106813 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:25 crc kubenswrapper[4902]: E1202 14:18:25.107419 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:25 crc kubenswrapper[4902]: E1202 14:18:25.107499 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:26 crc kubenswrapper[4902]: I1202 14:18:26.106195 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:26 crc kubenswrapper[4902]: E1202 14:18:26.107146 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:27 crc kubenswrapper[4902]: I1202 14:18:27.106025 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:27 crc kubenswrapper[4902]: I1202 14:18:27.106087 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:27 crc kubenswrapper[4902]: I1202 14:18:27.106127 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:27 crc kubenswrapper[4902]: E1202 14:18:27.106298 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:27 crc kubenswrapper[4902]: E1202 14:18:27.107087 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:27 crc kubenswrapper[4902]: E1202 14:18:27.107194 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:28 crc kubenswrapper[4902]: I1202 14:18:28.105604 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:28 crc kubenswrapper[4902]: E1202 14:18:28.105855 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tlnwn" podUID="b63b8b19-f855-4038-891d-6bfd1e5021de" Dec 02 14:18:29 crc kubenswrapper[4902]: I1202 14:18:29.106252 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:29 crc kubenswrapper[4902]: I1202 14:18:29.106272 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:29 crc kubenswrapper[4902]: E1202 14:18:29.107667 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 14:18:29 crc kubenswrapper[4902]: E1202 14:18:29.107913 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 14:18:29 crc kubenswrapper[4902]: I1202 14:18:29.108151 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:29 crc kubenswrapper[4902]: E1202 14:18:29.108368 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 14:18:30 crc kubenswrapper[4902]: I1202 14:18:30.067782 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:18:30 crc kubenswrapper[4902]: I1202 14:18:30.106399 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn" Dec 02 14:18:30 crc kubenswrapper[4902]: I1202 14:18:30.113258 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 14:18:30 crc kubenswrapper[4902]: I1202 14:18:30.113358 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.105712 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.105818 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.105843 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.109325 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.109777 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.110844 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 14:18:31 crc kubenswrapper[4902]: I1202 14:18:31.110859 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.924712 4902 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.971771 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.976383 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.977037 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.977098 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.982117 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.982128 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.982506 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.982741 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.984488 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.984638 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.985236 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.985902 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mxmxt"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986653 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vm2hb"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.988164 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986821 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.985985 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986074 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986129 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986272 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986397 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.986924 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.987880 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.987941 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.987947 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.992657 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.992809 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.992910 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.993089 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.994191 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.994353 4902 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.994500 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.998189 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.998595 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5x6s"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.998850 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht"] Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.999123 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.999158 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:33 crc kubenswrapper[4902]: I1202 14:18:33.999123 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.003204 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-86crq"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.003800 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.004141 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.004452 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.004994 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.005260 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.009324 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.014825 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.015078 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.015488 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.015906 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.015968 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016084 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016161 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016227 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016366 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016443 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016476 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016721 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016723 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016371 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016544 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016910 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016502 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.017098 4902 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016868 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.017022 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.017458 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.017727 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.016650 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.018076 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.018716 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.019245 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.022801 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.023107 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.023200 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.023427 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.023778 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026082 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026652 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-svzpr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.027065 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026112 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.027523 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026192 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026249 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026278 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026314 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026606 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026668 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026699 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026729 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026769 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026805 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026833 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026869 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026902 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026957 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.026996 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.029546 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.029629 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.029751 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 
14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.047295 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.049177 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.049379 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.049600 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.049950 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.051338 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-k4csg"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.052156 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.052784 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.058279 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.075071 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-xxldq"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.075757 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pkj67"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.076131 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.076590 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.077039 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.077163 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.077414 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.077052 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.078071 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.081777 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.082232 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.082921 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.083296 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.088626 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-lgl7q"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.089271 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.089903 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gh8sw"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.090680 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.091533 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.092113 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.096160 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.097756 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.098319 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.098922 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.099533 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.100041 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.102004 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.102343 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.102647 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qmcz9"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.103081 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.103280 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.103548 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.114414 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.127782 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.129964 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.131600 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.132735 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.133007 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.134956 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.135466 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.140030 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.141847 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.144826 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rgrlp"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.145295 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.150362 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.150988 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5x6s"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.151082 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.152782 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mxmxt"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.154239 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.154600 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.154632 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.155218 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.155936 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.156434 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.156633 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.156793 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.157226 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.158390 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.158385 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.158928 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.159260 
4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160367 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160855 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a21648d7-b40f-4da6-a228-c44b785c788a-config\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160886 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2996d85c-85fd-4462-9f50-b5dea4b5a762-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160904 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160921 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160939 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gmh5\" (UniqueName: \"kubernetes.io/projected/93abc37a-cc70-4b86-bd3f-4d3945d029f4-kube-api-access-7gmh5\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160956 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160973 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.160988 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/a21648d7-b40f-4da6-a228-c44b785c788a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161006 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0119aeb-1cd1-4503-ba6d-062f07f01491-serving-cert\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161021 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n9wm\" (UniqueName: \"kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161037 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161050 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161071 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv2pn\" (UniqueName: \"kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161101 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpnlt\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-kube-api-access-wpnlt\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161145 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-metrics-tls\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 
14:18:34.161166 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161196 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a21648d7-b40f-4da6-a228-c44b785c788a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161221 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161246 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161262 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161279 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-config\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161294 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161319 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161686 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-encryption-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161720 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bczhb\" (UniqueName: \"kubernetes.io/projected/d0119aeb-1cd1-4503-ba6d-062f07f01491-kube-api-access-bczhb\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161737 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4bxh\" (UniqueName: \"kubernetes.io/projected/90af0e3f-43b4-4349-933b-1d9c22b76438-kube-api-access-t4bxh\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161753 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-trusted-ca\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161768 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcgmc\" (UniqueName: \"kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161783 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-metrics-tls\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161797 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161814 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161830 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161846 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-serving-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161860 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-audit-dir\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161877 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161898 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161914 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161927 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-serving-cert\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161948 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93abc37a-cc70-4b86-bd3f-4d3945d029f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161966 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-audit\") pod 
\"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161985 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.161999 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-images\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162012 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162028 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gwjd\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-kube-api-access-2gwjd\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162044 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb26x\" (UniqueName: \"kubernetes.io/projected/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-kube-api-access-cb26x\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162057 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162073 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-config\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162100 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-node-pullsecrets\") pod \"apiserver-76f77b778f-vm2hb\" (UID: 
\"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162119 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-client\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162133 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2996d85c-85fd-4462-9f50-b5dea4b5a762-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162157 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-config\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162171 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162184 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c86jw\" (UniqueName: \"kubernetes.io/projected/2996d85c-85fd-4462-9f50-b5dea4b5a762-kube-api-access-c86jw\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.162211 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-image-import-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.176907 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.182147 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-apiserver/apiserver-76f77b778f-vm2hb"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.184978 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.186088 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.190199 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.190644 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.190778 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.190980 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.191034 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.191127 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.191169 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.191255 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.188206 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.201402 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.202100 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.202553 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.204871 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.204966 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-svzpr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.205457 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.206573 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.208030 4902 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.208220 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.209128 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.209405 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.209476 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.209847 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.210749 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.216298 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.220080 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.221882 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xxldq"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.225338 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-69fxq"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.226078 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gh8sw"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.226106 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.226187 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.228602 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.229544 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.230842 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pkj67"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.231958 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.232861 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.233831 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.234823 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.236244 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.237054 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qmcz9"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.238078 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-f4244"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.239092 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.239136 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.239171 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.240036 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-k4csg"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.241620 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.241970 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.243113 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.244194 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.245232 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.246381 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.248057 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.254131 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.258659 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.259520 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.259748 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-f4244"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.260744 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262429 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-flxzr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262814 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-config\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262846 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc 
kubenswrapper[4902]: I1202 14:18:34.262871 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262896 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-encryption-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262932 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bczhb\" (UniqueName: \"kubernetes.io/projected/d0119aeb-1cd1-4503-ba6d-062f07f01491-kube-api-access-bczhb\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262955 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4bxh\" (UniqueName: \"kubernetes.io/projected/90af0e3f-43b4-4349-933b-1d9c22b76438-kube-api-access-t4bxh\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-trusted-ca\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.262996 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcgmc\" (UniqueName: \"kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263018 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-metrics-tls\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263039 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263060 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263081 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-serving-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263102 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-audit-dir\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263122 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263152 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263174 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263194 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263214 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-serving-cert\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263243 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93abc37a-cc70-4b86-bd3f-4d3945d029f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: 
I1202 14:18:34.263265 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-audit\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263292 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-images\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263314 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263335 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gwjd\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-kube-api-access-2gwjd\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263358 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263387 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb26x\" (UniqueName: \"kubernetes.io/projected/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-kube-api-access-cb26x\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263407 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263432 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2996d85c-85fd-4462-9f50-b5dea4b5a762-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263455 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-config\") pod 
\"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263475 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-node-pullsecrets\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263499 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-client\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263532 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263555 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c86jw\" (UniqueName: \"kubernetes.io/projected/2996d85c-85fd-4462-9f50-b5dea4b5a762-kube-api-access-c86jw\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263599 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-image-import-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263623 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-config\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263646 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263665 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 
14:18:34.263686 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a21648d7-b40f-4da6-a228-c44b785c788a-config\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263708 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2996d85c-85fd-4462-9f50-b5dea4b5a762-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263730 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263751 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gmh5\" (UniqueName: \"kubernetes.io/projected/93abc37a-cc70-4b86-bd3f-4d3945d029f4-kube-api-access-7gmh5\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263773 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263794 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263815 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a21648d7-b40f-4da6-a228-c44b785c788a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263836 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263859 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263882 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0119aeb-1cd1-4503-ba6d-062f07f01491-serving-cert\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263907 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n9wm\" (UniqueName: \"kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263935 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv2pn\" (UniqueName: \"kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263957 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpnlt\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-kube-api-access-wpnlt\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263963 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-trusted-ca\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-metrics-tls\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.264001 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.264018 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-xknd4"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.264504 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-config\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265058 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-flxzr"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265144 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.264024 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a21648d7-b40f-4da6-a228-c44b785c788a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265431 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265484 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265512 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.265749 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.266417 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.266899 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rgrlp"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.266941 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xknd4"] Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.267056 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.267701 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.268058 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0119aeb-1cd1-4503-ba6d-062f07f01491-serving-cert\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.268080 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.268237 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-encryption-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.268278 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-audit-dir\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.268744 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-audit\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.263063 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.269593 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/93abc37a-cc70-4b86-bd3f-4d3945d029f4-images\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.270089 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-service-ca-bundle\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.270192 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-image-import-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.270292 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.270662 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2996d85c-85fd-4462-9f50-b5dea4b5a762-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.270779 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-config\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271111 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/93abc37a-cc70-4b86-bd3f-4d3945d029f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271362 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271376 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271480 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-client\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271609 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.271830 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272019 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90af0e3f-43b4-4349-933b-1d9c22b76438-etcd-serving-ca\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272151 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0119aeb-1cd1-4503-ba6d-062f07f01491-config\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272357 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272388 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90af0e3f-43b4-4349-933b-1d9c22b76438-node-pullsecrets\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272749 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-config\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272802 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.272999 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.273067 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.273721 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.273951 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.274156 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90af0e3f-43b4-4349-933b-1d9c22b76438-serving-cert\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.274983 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-metrics-tls\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.274991 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:34 crc 
kubenswrapper[4902]: I1202 14:18:34.276498 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2996d85c-85fd-4462-9f50-b5dea4b5a762-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.279890 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.306038 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-metrics-tls\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.307238 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.324410 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.339213 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.359474 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.379245 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.381120 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a21648d7-b40f-4da6-a228-c44b785c788a-config\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.399185 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.406421 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a21648d7-b40f-4da6-a228-c44b785c788a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.439825 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.460471 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.479439 4902 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.499256 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.519018 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.538664 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.560005 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.579467 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.599727 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.619960 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.642464 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.661842 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.680014 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.700128 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.719956 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.741435 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.759876 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.779490 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.799050 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.819117 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.839995 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.859808 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 
14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.882299 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.901295 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.920335 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.940784 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.959373 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.980012 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 14:18:34 crc kubenswrapper[4902]: I1202 14:18:34.999514 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.019940 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.039131 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.067299 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.080308 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.100935 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.118315 4902 request.go:700] Waited for 1.018097421s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.120048 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.140255 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.159681 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.179161 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 
14:18:35.200693 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.219674 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.240117 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.260006 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.278667 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.278793 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:35 crc kubenswrapper[4902]: E1202 14:18:35.278859 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:20:37.278830703 +0000 UTC m=+268.470139482 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.279000 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.279091 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.279123 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.280581 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.280683 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.283815 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.284099 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.284553 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" 
(UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.299762 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.319536 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.333475 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.340395 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.350354 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.359780 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.365617 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.381001 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.400906 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.420618 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.444111 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.459520 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.480886 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.520081 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.539516 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 14:18:35 crc kubenswrapper[4902]: W1202 14:18:35.547533 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-0972f1dec2b8242b99e43292dcd04749c715c0f8917b5d12aac9c84add34c915 WatchSource:0}: Error finding container 0972f1dec2b8242b99e43292dcd04749c715c0f8917b5d12aac9c84add34c915: Status 404 returned error 
can't find the container with id 0972f1dec2b8242b99e43292dcd04749c715c0f8917b5d12aac9c84add34c915 Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.558811 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.579378 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 14:18:35 crc kubenswrapper[4902]: W1202 14:18:35.581063 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-bb921cf29e3773a25ba5b6f088b5870c5cf34df995826a70e41346c5daa896de WatchSource:0}: Error finding container bb921cf29e3773a25ba5b6f088b5870c5cf34df995826a70e41346c5daa896de: Status 404 returned error can't find the container with id bb921cf29e3773a25ba5b6f088b5870c5cf34df995826a70e41346c5daa896de Dec 02 14:18:35 crc kubenswrapper[4902]: W1202 14:18:35.586464 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-9a0fb8844408188f3840b357f22a16a895340062995338cb54e0a8d51de6da17 WatchSource:0}: Error finding container 9a0fb8844408188f3840b357f22a16a895340062995338cb54e0a8d51de6da17: Status 404 returned error can't find the container with id 9a0fb8844408188f3840b357f22a16a895340062995338cb54e0a8d51de6da17 Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.600052 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.619797 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.639740 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.658687 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.679334 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.699836 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.719983 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.739081 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.758632 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.779260 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.798968 4902 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.819503 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.840297 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.859813 4902 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.868320 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bb921cf29e3773a25ba5b6f088b5870c5cf34df995826a70e41346c5daa896de"} Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.869792 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0972f1dec2b8242b99e43292dcd04749c715c0f8917b5d12aac9c84add34c915"} Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.872040 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9a0fb8844408188f3840b357f22a16a895340062995338cb54e0a8d51de6da17"} Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.880653 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.916836 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcgmc\" (UniqueName: \"kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc\") pod \"console-f9d7485db-8bcjv\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") " pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.919182 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.957901 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bczhb\" (UniqueName: \"kubernetes.io/projected/d0119aeb-1cd1-4503-ba6d-062f07f01491-kube-api-access-bczhb\") pod \"authentication-operator-69f744f599-v5x6s\" (UID: \"d0119aeb-1cd1-4503-ba6d-062f07f01491\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.960245 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 14:18:35 crc kubenswrapper[4902]: I1202 14:18:35.980062 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.000177 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.016634 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.046130 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.057227 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpnlt\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-kube-api-access-wpnlt\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.087263 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n9wm\" (UniqueName: \"kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm\") pod \"route-controller-manager-6576b87f9c-x2flc\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.106145 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a21648d7-b40f-4da6-a228-c44b785c788a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-6n55q\" (UID: \"a21648d7-b40f-4da6-a228-c44b785c788a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.109354 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.123883 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv2pn\" (UniqueName: \"kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn\") pod \"controller-manager-879f6c89f-5g67q\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.138056 4902 request.go:700] Waited for 1.869648569s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.149741 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4bxh\" (UniqueName: \"kubernetes.io/projected/90af0e3f-43b4-4349-933b-1d9c22b76438-kube-api-access-t4bxh\") pod \"apiserver-76f77b778f-vm2hb\" (UID: \"90af0e3f-43b4-4349-933b-1d9c22b76438\") " pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.159964 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.167048 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lzbvn\" (UID: \"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.180969 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.185007 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.202010 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.215537 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.227959 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"] Dec 02 14:18:36 crc kubenswrapper[4902]: W1202 14:18:36.239336 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7263104_6357_4d41_a133_faf27fb96fd4.slice/crio-fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9 WatchSource:0}: Error finding container fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9: Status 404 returned error can't find the container with id fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9 Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.241548 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gwjd\" (UniqueName: \"kubernetes.io/projected/16d60e4a-c323-46b9-98ac-7ce9e1949e9f-kube-api-access-2gwjd\") pod \"ingress-operator-5b745b69d9-qm22x\" (UID: \"16d60e4a-c323-46b9-98ac-7ce9e1949e9f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.259741 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.264548 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c86jw\" (UniqueName: \"kubernetes.io/projected/2996d85c-85fd-4462-9f50-b5dea4b5a762-kube-api-access-c86jw\") pod \"openshift-controller-manager-operator-756b6f6bc6-ks9ht\" (UID: \"2996d85c-85fd-4462-9f50-b5dea4b5a762\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.275110 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c2ef3a5-4e54-4162-bef2-60a40dc206f7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kqkfj\" (UID: \"2c2ef3a5-4e54-4162-bef2-60a40dc206f7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.302232 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gmh5\" (UniqueName: \"kubernetes.io/projected/93abc37a-cc70-4b86-bd3f-4d3945d029f4-kube-api-access-7gmh5\") pod \"machine-api-operator-5694c8668f-mxmxt\" (UID: \"93abc37a-cc70-4b86-bd3f-4d3945d029f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.311823 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.317603 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb26x\" (UniqueName: \"kubernetes.io/projected/e5dcbc97-fe74-4313-8205-4b8fd0eb2c13-kube-api-access-cb26x\") pod \"dns-operator-744455d44c-svzpr\" (UID: \"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13\") " pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.361250 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.390657 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vm2hb"] Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.415416 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-v5x6s"] Dec 02 14:18:36 crc kubenswrapper[4902]: I1202 14:18:36.876219 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8bcjv" event={"ID":"e7263104-6357-4d41-a133-faf27fb96fd4","Type":"ContainerStarted","Data":"fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9"} Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.527472 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.530674 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.533516 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.534606 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.535442 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.536868 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.554082 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.555597 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tplxn\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.555664 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.555783 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.555847 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.556132 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.556762 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.556904 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: E1202 14:18:40.557321 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:41.057242688 +0000 UTC m=+152.248551427 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:40 crc kubenswrapper[4902]: W1202 14:18:40.566546 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf61a29cb_55b4_42b7_817f_67600cb70980.slice/crio-76e5a21021a65d29a66e6a0752b5f19ebc3765d7ab7a6d28ea4fa544ff4272a5 WatchSource:0}: Error finding container 76e5a21021a65d29a66e6a0752b5f19ebc3765d7ab7a6d28ea4fa544ff4272a5: Status 404 returned error can't find the container with id 76e5a21021a65d29a66e6a0752b5f19ebc3765d7ab7a6d28ea4fa544ff4272a5 Dec 02 14:18:40 crc kubenswrapper[4902]: W1202 14:18:40.571781 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0119aeb_1cd1_4503_ba6d_062f07f01491.slice/crio-1df0fb1ebb3d42a7628fb4c81cec5a4b7fa801b47d2c935a3d3adb21759d17c9 WatchSource:0}: Error finding container 1df0fb1ebb3d42a7628fb4c81cec5a4b7fa801b47d2c935a3d3adb21759d17c9: Status 404 returned error can't find the container with id 1df0fb1ebb3d42a7628fb4c81cec5a4b7fa801b47d2c935a3d3adb21759d17c9 Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659193 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659275 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc7cz\" (UniqueName: \"kubernetes.io/projected/07de0872-46f2-4a69-af4c-a811e7ee3a8d-kube-api-access-zc7cz\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659300 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-registration-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659324 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5m9k\" (UniqueName: \"kubernetes.io/projected/8f341c23-6370-47df-af45-d654197c980b-kube-api-access-q5m9k\") pod 
\"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659344 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659364 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpnch\" (UniqueName: \"kubernetes.io/projected/36555a35-fed5-4cc3-b0d1-940a5de68bfe-kube-api-access-kpnch\") pod \"downloads-7954f5f757-xxldq\" (UID: \"36555a35-fed5-4cc3-b0d1-940a5de68bfe\") " pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659386 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4df04066-9f98-4c4e-88eb-704118e339d3-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659418 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8djfx\" (UniqueName: \"kubernetes.io/projected/54c71347-54d5-49cb-b5bb-12b0d607f8ad-kube-api-access-8djfx\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659608 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659637 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-plugins-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659654 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh72t\" (UniqueName: \"kubernetes.io/projected/326be887-d9c0-41bd-9eec-2c753701d197-kube-api-access-rh72t\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:40 crc kubenswrapper[4902]: E1202 14:18:40.660669 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 14:18:41.160645438 +0000 UTC m=+152.351954147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.659671 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-client\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661348 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t86xh\" (UniqueName: \"kubernetes.io/projected/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-kube-api-access-t86xh\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661376 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-srv-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661396 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwrrb\" (UniqueName: \"kubernetes.io/projected/4df04066-9f98-4c4e-88eb-704118e339d3-kube-api-access-zwrrb\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661470 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-encryption-config\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661501 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661527 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-node-bootstrap-token\") pod \"machine-config-server-69fxq\" 
(UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661550 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-webhook-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661593 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9391167d-2391-4d04-a34a-6ac4aa19518b-serving-cert\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661618 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661678 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661701 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661722 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-policies\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661744 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-default-certificate\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661777 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 
crc kubenswrapper[4902]: I1202 14:18:40.661834 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661857 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-serving-cert\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661879 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4xx8\" (UniqueName: \"kubernetes.io/projected/355cfe3d-0577-427d-a6fc-31df4e70e6d5-kube-api-access-p4xx8\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661901 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjs6b\" (UniqueName: \"kubernetes.io/projected/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-kube-api-access-cjs6b\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661921 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-csi-data-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.661981 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-images\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662003 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b853669d-191c-4a38-96c8-290914042f96-service-ca-bundle\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662025 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-cabundle\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662049 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-serving-cert\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662071 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/07de0872-46f2-4a69-af4c-a811e7ee3a8d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662108 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-mountpoint-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662128 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-certs\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662150 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-key\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662173 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zggld\" (UniqueName: \"kubernetes.io/projected/30d9c634-f04e-4333-bdc6-083b2fdf5e14-kube-api-access-zggld\") pod \"migrator-59844c95c7-m8fcn\" (UID: \"30d9c634-f04e-4333-bdc6-083b2fdf5e14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/553e438c-3d4e-4252-9021-a3725ea43f81-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662262 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662289 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-sbw44\" (UniqueName: \"kubernetes.io/projected/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-kube-api-access-sbw44\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662316 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-serving-cert\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.662338 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: E1202 14:18:40.664015 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:41.163999874 +0000 UTC m=+152.355308693 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.664513 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666181 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666253 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8nk2\" (UniqueName: \"kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666338 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-stats-auth\") pod 
\"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666432 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666482 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.666506 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667021 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-service-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667108 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/67986385-6a35-4b14-b38f-cdee1c9a1b5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667232 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667329 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667433 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tplxn\" (UniqueName: 
\"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667458 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667496 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667520 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk2s2\" (UniqueName: \"kubernetes.io/projected/9391167d-2391-4d04-a34a-6ac4aa19518b-kube-api-access-zk2s2\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.667587 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gll5\" (UniqueName: \"kubernetes.io/projected/55a80003-f940-44a9-8f12-55f887816397-kube-api-access-6gll5\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.668938 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4df04066-9f98-4c4e-88eb-704118e339d3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669062 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-client\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669087 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26s2q\" (UniqueName: \"kubernetes.io/projected/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-kube-api-access-26s2q\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669111 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-pc5hk\" (UniqueName: \"kubernetes.io/projected/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-kube-api-access-pc5hk\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669136 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669234 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-socket-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669306 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-dir\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669378 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669731 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669818 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/553e438c-3d4e-4252-9021-a3725ea43f81-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669842 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669862 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5plh\" (UniqueName: \"kubernetes.io/projected/67986385-6a35-4b14-b38f-cdee1c9a1b5d-kube-api-access-w5plh\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669884 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81329550-5c15-4ad3-bc03-49a3c0506f9f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669909 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669932 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9d0021ed-bd4b-49bd-a0a1-b175e794697e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669952 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8f341c23-6370-47df-af45-d654197c980b-tmpfs\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669972 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326be887-d9c0-41bd-9eec-2c753701d197-cert\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.669995 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjhnw\" (UniqueName: \"kubernetes.io/projected/b96aa7be-f972-4bd7-bd39-2d0a2017101c-kube-api-access-vjhnw\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670042 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tcht\" (UniqueName: 
\"kubernetes.io/projected/8b4b4ce5-869d-4047-b1d5-b4365ded3549-kube-api-access-7tcht\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670076 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-config\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670155 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-available-featuregates\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670222 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670247 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b96aa7be-f972-4bd7-bd39-2d0a2017101c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670297 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670320 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670341 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81329550-5c15-4ad3-bc03-49a3c0506f9f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.670360 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/29d9646d-bcc6-42a1-b082-286dc0bf9131-metrics-tls\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671231 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671258 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-serving-cert\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671279 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5ml8\" (UniqueName: \"kubernetes.io/projected/81329550-5c15-4ad3-bc03-49a3c0506f9f-kube-api-access-d5ml8\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671299 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k47t6\" (UniqueName: \"kubernetes.io/projected/b853669d-191c-4a38-96c8-290914042f96-kube-api-access-k47t6\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671344 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vlwd\" (UniqueName: \"kubernetes.io/projected/9768e394-e777-4957-9c84-1a1c0e70cc9f-kube-api-access-9vlwd\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671365 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553e438c-3d4e-4252-9021-a3725ea43f81-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671399 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-srv-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671414 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671437 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-trusted-ca\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671461 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671502 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671581 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671627 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9d0021ed-bd4b-49bd-a0a1-b175e794697e-proxy-tls\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671649 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnxtx\" (UniqueName: \"kubernetes.io/projected/9d0021ed-bd4b-49bd-a0a1-b175e794697e-kube-api-access-fnxtx\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671674 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-974t9\" (UniqueName: \"kubernetes.io/projected/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-kube-api-access-974t9\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.671697 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9391167d-2391-4d04-a34a-6ac4aa19518b-config\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.674999 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-metrics-certs\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675027 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29d9646d-bcc6-42a1-b082-286dc0bf9131-config-volume\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675068 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b4b4ce5-869d-4047-b1d5-b4365ded3549-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675090 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrvww\" (UniqueName: \"kubernetes.io/projected/4db84fcb-a010-4914-9975-9a99aafe6cb2-kube-api-access-lrvww\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675125 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmn78\" (UniqueName: \"kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675158 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84tj4\" (UniqueName: \"kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-apiservice-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675228 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675302 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4db84fcb-a010-4914-9975-9a99aafe6cb2-proxy-tls\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675341 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt9zv\" (UniqueName: \"kubernetes.io/projected/29d9646d-bcc6-42a1-b082-286dc0bf9131-kube-api-access-vt9zv\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675514 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675541 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-config\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.675595 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.676940 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.677120 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.681136 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token\") pod 
\"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.682884 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tplxn\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.776814 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj"] Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.777302 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:40 crc kubenswrapper[4902]: E1202 14:18:40.777731 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:41.277682987 +0000 UTC m=+152.468991696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.777840 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778000 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-service-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778025 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778174 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/67986385-6a35-4b14-b38f-cdee1c9a1b5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778206 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778275 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778306 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk2s2\" (UniqueName: \"kubernetes.io/projected/9391167d-2391-4d04-a34a-6ac4aa19518b-kube-api-access-zk2s2\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.778324 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gll5\" (UniqueName: \"kubernetes.io/projected/55a80003-f940-44a9-8f12-55f887816397-kube-api-access-6gll5\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.780727 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4df04066-9f98-4c4e-88eb-704118e339d3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.781706 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4df04066-9f98-4c4e-88eb-704118e339d3-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782184 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-client\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782253 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26s2q\" (UniqueName: 
\"kubernetes.io/projected/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-kube-api-access-26s2q\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782277 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782299 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-socket-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782345 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-dir\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782367 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc5hk\" (UniqueName: \"kubernetes.io/projected/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-kube-api-access-pc5hk\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782399 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782420 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782447 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/553e438c-3d4e-4252-9021-a3725ea43f81-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782466 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5plh\" (UniqueName: \"kubernetes.io/projected/67986385-6a35-4b14-b38f-cdee1c9a1b5d-kube-api-access-w5plh\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782585 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81329550-5c15-4ad3-bc03-49a3c0506f9f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782623 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9d0021ed-bd4b-49bd-a0a1-b175e794697e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782613 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782635 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-machine-approver-tls\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782654 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8f341c23-6370-47df-af45-d654197c980b-tmpfs\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782689 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326be887-d9c0-41bd-9eec-2c753701d197-cert\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782759 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjhnw\" (UniqueName: \"kubernetes.io/projected/b96aa7be-f972-4bd7-bd39-2d0a2017101c-kube-api-access-vjhnw\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782802 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782841 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tcht\" (UniqueName: \"kubernetes.io/projected/8b4b4ce5-869d-4047-b1d5-b4365ded3549-kube-api-access-7tcht\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782892 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-available-featuregates\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782925 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-config\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782953 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.782987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b96aa7be-f972-4bd7-bd39-2d0a2017101c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783026 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783275 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81329550-5c15-4ad3-bc03-49a3c0506f9f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783385 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-socket-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783449 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-dir\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783529 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8f341c23-6370-47df-af45-d654197c980b-tmpfs\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783659 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/67986385-6a35-4b14-b38f-cdee1c9a1b5d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.783352 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/29d9646d-bcc6-42a1-b082-286dc0bf9131-metrics-tls\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.784365 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.784495 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-serving-cert\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.784758 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.784915 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k47t6\" (UniqueName: \"kubernetes.io/projected/b853669d-191c-4a38-96c8-290914042f96-kube-api-access-k47t6\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc 
kubenswrapper[4902]: I1202 14:18:40.785141 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vlwd\" (UniqueName: \"kubernetes.io/projected/9768e394-e777-4957-9c84-1a1c0e70cc9f-kube-api-access-9vlwd\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.785304 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553e438c-3d4e-4252-9021-a3725ea43f81-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.785510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5ml8\" (UniqueName: \"kubernetes.io/projected/81329550-5c15-4ad3-bc03-49a3c0506f9f-kube-api-access-d5ml8\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.785729 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-srv-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.786165 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-trusted-ca\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.786471 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787008 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-client\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787194 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787358 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/9d0021ed-bd4b-49bd-a0a1-b175e794697e-proxy-tls\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787673 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnxtx\" (UniqueName: \"kubernetes.io/projected/9d0021ed-bd4b-49bd-a0a1-b175e794697e-kube-api-access-fnxtx\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787839 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9391167d-2391-4d04-a34a-6ac4aa19518b-config\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.787995 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-available-featuregates\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.788001 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-metrics-certs\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.788327 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29d9646d-bcc6-42a1-b082-286dc0bf9131-config-volume\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.788515 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/29d9646d-bcc6-42a1-b082-286dc0bf9131-metrics-tls\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.788692 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-974t9\" (UniqueName: \"kubernetes.io/projected/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-kube-api-access-974t9\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.788864 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b4b4ce5-869d-4047-b1d5-b4365ded3549-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 
14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.789017 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrvww\" (UniqueName: \"kubernetes.io/projected/4db84fcb-a010-4914-9975-9a99aafe6cb2-kube-api-access-lrvww\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.789187 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmn78\" (UniqueName: \"kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.789328 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84tj4\" (UniqueName: \"kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.789457 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-apiservice-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.789603 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4db84fcb-a010-4914-9975-9a99aafe6cb2-proxy-tls\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790217 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt9zv\" (UniqueName: \"kubernetes.io/projected/29d9646d-bcc6-42a1-b082-286dc0bf9131-kube-api-access-vt9zv\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790333 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790440 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790590 4902 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-config\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790773 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.790864 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9d0021ed-bd4b-49bd-a0a1-b175e794697e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791028 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-registration-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791147 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5m9k\" (UniqueName: \"kubernetes.io/projected/8f341c23-6370-47df-af45-d654197c980b-kube-api-access-q5m9k\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791302 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791403 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpnch\" (UniqueName: \"kubernetes.io/projected/36555a35-fed5-4cc3-b0d1-940a5de68bfe-kube-api-access-kpnch\") pod \"downloads-7954f5f757-xxldq\" (UID: \"36555a35-fed5-4cc3-b0d1-940a5de68bfe\") " pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791537 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc7cz\" (UniqueName: \"kubernetes.io/projected/07de0872-46f2-4a69-af4c-a811e7ee3a8d-kube-api-access-zc7cz\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791690 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4df04066-9f98-4c4e-88eb-704118e339d3-serving-cert\") 
pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791831 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.791950 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8djfx\" (UniqueName: \"kubernetes.io/projected/54c71347-54d5-49cb-b5bb-12b0d607f8ad-kube-api-access-8djfx\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792252 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-plugins-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792301 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh72t\" (UniqueName: \"kubernetes.io/projected/326be887-d9c0-41bd-9eec-2c753701d197-kube-api-access-rh72t\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792334 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-client\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t86xh\" (UniqueName: \"kubernetes.io/projected/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-kube-api-access-t86xh\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792949 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwrrb\" (UniqueName: \"kubernetes.io/projected/4df04066-9f98-4c4e-88eb-704118e339d3-kube-api-access-zwrrb\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.792997 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-srv-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793028 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-encryption-config\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793067 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793127 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-node-bootstrap-token\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793161 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9391167d-2391-4d04-a34a-6ac4aa19518b-serving-cert\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793187 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793299 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793410 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-webhook-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793438 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-policies\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793482 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"default-certificate\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-default-certificate\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.793510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.786874 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.794191 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-serving-cert\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.794246 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4xx8\" (UniqueName: \"kubernetes.io/projected/355cfe3d-0577-427d-a6fc-31df4e70e6d5-kube-api-access-p4xx8\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.794292 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-csi-data-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.794389 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-csi-data-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.795379 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/553e438c-3d4e-4252-9021-a3725ea43f81-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.795445 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.795958 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.796009 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.796994 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-trusted-ca\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797245 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjs6b\" (UniqueName: \"kubernetes.io/projected/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-kube-api-access-cjs6b\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797501 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-images\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797547 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-cabundle\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797616 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b853669d-191c-4a38-96c8-290914042f96-service-ca-bundle\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797657 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-serving-cert\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797695 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/07de0872-46f2-4a69-af4c-a811e7ee3a8d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797726 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-mountpoint-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797757 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-certs\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797768 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797791 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-key\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797299 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-registration-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797998 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zggld\" (UniqueName: \"kubernetes.io/projected/30d9c634-f04e-4333-bdc6-083b2fdf5e14-kube-api-access-zggld\") pod \"migrator-59844c95c7-m8fcn\" (UID: \"30d9c634-f04e-4333-bdc6-083b2fdf5e14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798036 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/553e438c-3d4e-4252-9021-a3725ea43f81-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798088 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbw44\" (UniqueName: \"kubernetes.io/projected/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-kube-api-access-sbw44\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798120 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-serving-cert\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798148 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798178 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8nk2\" (UniqueName: \"kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798213 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798251 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-stats-auth\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.798323 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-serving-cert\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.797180 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.799519 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-cabundle\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.800077 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-auth-proxy-config\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.801063 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.802711 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4db84fcb-a010-4914-9975-9a99aafe6cb2-images\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.806279 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.807100 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-serving-cert\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.807263 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.807969 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.813176 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-mountpoint-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.814706 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/54c71347-54d5-49cb-b5bb-12b0d607f8ad-plugins-dir\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " 
pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.814790 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9391167d-2391-4d04-a34a-6ac4aa19518b-config\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.815734 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326be887-d9c0-41bd-9eec-2c753701d197-cert\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.815798 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.817343 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/07de0872-46f2-4a69-af4c-a811e7ee3a8d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:40 crc kubenswrapper[4902]: I1202 14:18:40.823263 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-config\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.532409 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/553e438c-3d4e-4252-9021-a3725ea43f81-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.534337 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk2s2\" (UniqueName: \"kubernetes.io/projected/9391167d-2391-4d04-a34a-6ac4aa19518b-kube-api-access-zk2s2\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.535250 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b853669d-191c-4a38-96c8-290914042f96-service-ca-bundle\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.540856 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.542208 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-srv-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.543143 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-signing-key\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.543261 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-serving-cert\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.543291 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-metrics-certs\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.545288 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-encryption-config\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.545407 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26s2q\" (UniqueName: \"kubernetes.io/projected/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-kube-api-access-26s2q\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.543830 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8-etcd-service-ca\") pod \"etcd-operator-b45778765-gh8sw\" (UID: \"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.546218 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjhnw\" (UniqueName: \"kubernetes.io/projected/b96aa7be-f972-4bd7-bd39-2d0a2017101c-kube-api-access-vjhnw\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.546414 
4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.547820 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.546909 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4df04066-9f98-4c4e-88eb-704118e339d3-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.550355 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29d9646d-bcc6-42a1-b082-286dc0bf9131-config-volume\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.551034 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-etcd-client\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.551060 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.551963 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81329550-5c15-4ad3-bc03-49a3c0506f9f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.552271 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/355cfe3d-0577-427d-a6fc-31df4e70e6d5-srv-cert\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.552479 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.552531 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9d0021ed-bd4b-49bd-a0a1-b175e794697e-proxy-tls\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.552964 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gll5\" (UniqueName: \"kubernetes.io/projected/55a80003-f940-44a9-8f12-55f887816397-kube-api-access-6gll5\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.553662 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9768e394-e777-4957-9c84-1a1c0e70cc9f-audit-policies\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.553681 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-certs\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.556487 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tcht\" (UniqueName: \"kubernetes.io/projected/8b4b4ce5-869d-4047-b1d5-b4365ded3549-kube-api-access-7tcht\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.560452 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t86xh\" (UniqueName: \"kubernetes.io/projected/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-kube-api-access-t86xh\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.560555 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.567785 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc5hk\" (UniqueName: \"kubernetes.io/projected/b4f95734-55e8-4d38-8d4a-51f9876fa4e1-kube-api-access-pc5hk\") pod \"machine-approver-56656f9798-86crq\" (UID: \"b4f95734-55e8-4d38-8d4a-51f9876fa4e1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.569371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.575507 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.581116 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-webhook-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.583090 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh72t\" (UniqueName: \"kubernetes.io/projected/326be887-d9c0-41bd-9eec-2c753701d197-kube-api-access-rh72t\") pod \"ingress-canary-xknd4\" (UID: \"326be887-d9c0-41bd-9eec-2c753701d197\") " pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.584012 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpnch\" (UniqueName: \"kubernetes.io/projected/36555a35-fed5-4cc3-b0d1-940a5de68bfe-kube-api-access-kpnch\") pod \"downloads-7954f5f757-xxldq\" (UID: \"36555a35-fed5-4cc3-b0d1-940a5de68bfe\") " pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.589527 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8f341c23-6370-47df-af45-d654197c980b-apiservice-cert\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.590178 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9768e394-e777-4957-9c84-1a1c0e70cc9f-serving-cert\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.597494 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8djfx\" (UniqueName: \"kubernetes.io/projected/54c71347-54d5-49cb-b5bb-12b0d607f8ad-kube-api-access-8djfx\") pod \"csi-hostpathplugin-f4244\" (UID: \"54c71347-54d5-49cb-b5bb-12b0d607f8ad\") " pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.598371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b4b4ce5-869d-4047-b1d5-b4365ded3549-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jzjr6\" (UID: \"8b4b4ce5-869d-4047-b1d5-b4365ded3549\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.598443 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k47t6\" (UniqueName: \"kubernetes.io/projected/b853669d-191c-4a38-96c8-290914042f96-kube-api-access-k47t6\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.600292 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9391167d-2391-4d04-a34a-6ac4aa19518b-serving-cert\") pod \"service-ca-operator-777779d784-wkjp6\" (UID: \"9391167d-2391-4d04-a34a-6ac4aa19518b\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.600325 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-profile-collector-cert\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.600342 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:41 crc kubenswrapper[4902]: E1202 14:18:41.601421 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.101400982 +0000 UTC m=+153.292709701 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.601916 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-stats-auth\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.603439 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4db84fcb-a010-4914-9975-9a99aafe6cb2-proxy-tls\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.603879 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.605986 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58a8bd42-a740-4f79-bed8-87ec3ab27a0d-config\") pod \"console-operator-58897d9998-pkj67\" (UID: \"58a8bd42-a740-4f79-bed8-87ec3ab27a0d\") " pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.606243 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.607454 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8nk2\" (UniqueName: \"kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2\") pod \"marketplace-operator-79b997595-ghgfc\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.608512 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.609353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwrrb\" (UniqueName: \"kubernetes.io/projected/4df04066-9f98-4c4e-88eb-704118e339d3-kube-api-access-zwrrb\") pod \"kube-storage-version-migrator-operator-b67b599dd-szb48\" (UID: \"4df04066-9f98-4c4e-88eb-704118e339d3\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.609697 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmn78\" (UniqueName: \"kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.610331 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" event={"ID":"90af0e3f-43b4-4349-933b-1d9c22b76438","Type":"ContainerStarted","Data":"9b04c2171792b8a4fb0a86695f720ff65b7b10f1824bdb7dbccc9c72b6402620"} Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.610485 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/55a80003-f940-44a9-8f12-55f887816397-node-bootstrap-token\") pod \"machine-config-server-69fxq\" (UID: \"55a80003-f940-44a9-8f12-55f887816397\") " pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.611032 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zggld\" (UniqueName: \"kubernetes.io/projected/30d9c634-f04e-4333-bdc6-083b2fdf5e14-kube-api-access-zggld\") pod \"migrator-59844c95c7-m8fcn\" (UID: \"30d9c634-f04e-4333-bdc6-083b2fdf5e14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.612153 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5m9k\" (UniqueName: \"kubernetes.io/projected/8f341c23-6370-47df-af45-d654197c980b-kube-api-access-q5m9k\") pod \"packageserver-d55dfcdfc-tskrw\" (UID: \"8f341c23-6370-47df-af45-d654197c980b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.612308 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-w5plh\" (UniqueName: \"kubernetes.io/projected/67986385-6a35-4b14-b38f-cdee1c9a1b5d-kube-api-access-w5plh\") pod \"package-server-manager-789f6589d5-fjmsz\" (UID: \"67986385-6a35-4b14-b38f-cdee1c9a1b5d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.614211 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc7cz\" (UniqueName: \"kubernetes.io/projected/07de0872-46f2-4a69-af4c-a811e7ee3a8d-kube-api-access-zc7cz\") pod \"control-plane-machine-set-operator-78cbb6b69f-rxhgz\" (UID: \"07de0872-46f2-4a69-af4c-a811e7ee3a8d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.614752 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbw44\" (UniqueName: \"kubernetes.io/projected/f0fd5896-8b33-4fb3-a56c-1e7741ca29df-kube-api-access-sbw44\") pod \"openshift-config-operator-7777fb866f-k4csg\" (UID: \"f0fd5896-8b33-4fb3-a56c-1e7741ca29df\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.614859 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" event={"ID":"d0119aeb-1cd1-4503-ba6d-062f07f01491","Type":"ContainerStarted","Data":"1df0fb1ebb3d42a7628fb4c81cec5a4b7fa801b47d2c935a3d3adb21759d17c9"} Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.614927 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84tj4\" (UniqueName: \"kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4\") pod \"collect-profiles-29411415-rc7hx\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.615541 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b96aa7be-f972-4bd7-bd39-2d0a2017101c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qmcz9\" (UID: \"b96aa7be-f972-4bd7-bd39-2d0a2017101c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.616919 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" event={"ID":"f61a29cb-55b4-42b7-817f-67600cb70980","Type":"ContainerStarted","Data":"76e5a21021a65d29a66e6a0752b5f19ebc3765d7ab7a6d28ea4fa544ff4272a5"} Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.617722 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/553e438c-3d4e-4252-9021-a3725ea43f81-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xp79t\" (UID: \"553e438c-3d4e-4252-9021-a3725ea43f81\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.619052 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5ml8\" (UniqueName: \"kubernetes.io/projected/81329550-5c15-4ad3-bc03-49a3c0506f9f-kube-api-access-d5ml8\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: 
\"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.620944 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4xx8\" (UniqueName: \"kubernetes.io/projected/355cfe3d-0577-427d-a6fc-31df4e70e6d5-kube-api-access-p4xx8\") pod \"olm-operator-6b444d44fb-z86lr\" (UID: \"355cfe3d-0577-427d-a6fc-31df4e70e6d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.621199 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g9hgb\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") " pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.623040 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.624257 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.627950 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81329550-5c15-4ad3-bc03-49a3c0506f9f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpl42\" (UID: \"81329550-5c15-4ad3-bc03-49a3c0506f9f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.629732 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-69fxq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.630242 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrvww\" (UniqueName: \"kubernetes.io/projected/4db84fcb-a010-4914-9975-9a99aafe6cb2-kube-api-access-lrvww\") pod \"machine-config-operator-74547568cd-ppjnj\" (UID: \"4db84fcb-a010-4914-9975-9a99aafe6cb2\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.630799 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-974t9\" (UniqueName: \"kubernetes.io/projected/ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e-kube-api-access-974t9\") pod \"service-ca-9c57cc56f-rgrlp\" (UID: \"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e\") " pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.632063 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b853669d-191c-4a38-96c8-290914042f96-default-certificate\") pod \"router-default-5444994796-lgl7q\" (UID: \"b853669d-191c-4a38-96c8-290914042f96\") " pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.632148 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt9zv\" (UniqueName: \"kubernetes.io/projected/29d9646d-bcc6-42a1-b082-286dc0bf9131-kube-api-access-vt9zv\") pod \"dns-default-flxzr\" (UID: \"29d9646d-bcc6-42a1-b082-286dc0bf9131\") " pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.640657 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnxtx\" (UniqueName: \"kubernetes.io/projected/9d0021ed-bd4b-49bd-a0a1-b175e794697e-kube-api-access-fnxtx\") pod \"machine-config-controller-84d6567774-9xtpg\" (UID: \"9d0021ed-bd4b-49bd-a0a1-b175e794697e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.651394 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vlwd\" (UniqueName: \"kubernetes.io/projected/9768e394-e777-4957-9c84-1a1c0e70cc9f-kube-api-access-9vlwd\") pod \"apiserver-7bbb656c7d-vf7qq\" (UID: \"9768e394-e777-4957-9c84-1a1c0e70cc9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.651413 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.651506 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjs6b\" (UniqueName: \"kubernetes.io/projected/69ffdb33-2a31-4487-bc6c-ea6919c4a5dc-kube-api-access-cjs6b\") pod \"catalog-operator-68c6474976-682gl\" (UID: \"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.653612 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-f4244" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.659806 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xknd4" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.666068 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.676550 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:41 crc kubenswrapper[4902]: E1202 14:18:41.676890 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.176868565 +0000 UTC m=+153.368177274 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.721868 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.728031 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.735247 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.743384 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.760351 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.763879 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x"] Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.778037 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:41 crc kubenswrapper[4902]: E1202 14:18:41.778375 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 14:18:42.278359121 +0000 UTC m=+153.469667920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.806669 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.815112 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.820879 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.826626 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.835623 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.839854 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.847665 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.848200 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.852340 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.857508 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.864007 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.869743 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.878876 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.879485 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:41 crc kubenswrapper[4902]: E1202 14:18:41.879905 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.379888808 +0000 UTC m=+153.571197507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.889879 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.892766 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.908011 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" Dec 02 14:18:41 crc kubenswrapper[4902]: I1202 14:18:41.982189 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:41 crc kubenswrapper[4902]: E1202 14:18:41.982504 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.482492246 +0000 UTC m=+153.673800955 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.068103 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.073735 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mxmxt"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.083881 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.087921 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.587895644 +0000 UTC m=+153.779204343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.090358 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xxldq"] Dec 02 14:18:42 crc kubenswrapper[4902]: W1202 14:18:42.110482 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda21648d7_b40f_4da6_a228_c44b785c788a.slice/crio-f48875393616bde8da31b5ab23ef55c79065d11752870f05b5fcc2bea0a5f182 WatchSource:0}: Error finding container f48875393616bde8da31b5ab23ef55c79065d11752870f05b5fcc2bea0a5f182: Status 404 returned error can't find the container with id f48875393616bde8da31b5ab23ef55c79065d11752870f05b5fcc2bea0a5f182 Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.125325 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-pkj67"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.147380 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.151727 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.163556 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-svzpr"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.190837 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.191158 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.69114672 +0000 UTC m=+153.882455429 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.203392 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.208949 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.212549 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.249924 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.291874 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.292130 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.792108291 +0000 UTC m=+153.983417000 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.292365 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.297837 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.797800143 +0000 UTC m=+153.989108852 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.404184 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.404432 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.904406915 +0000 UTC m=+154.095715624 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.404594 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.404980 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:42.904964821 +0000 UTC m=+154.096273530 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.505893 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.506221 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.00620716 +0000 UTC m=+154.197515869 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.608009 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.608391 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.108374915 +0000 UTC m=+154.299683624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.624070 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b0dae67aac723ee81b5073c68311235e9625410949e8f71be83fcec0708d5d89"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.627123 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" event={"ID":"6244913f-facc-4fe2-be8c-09238b2704d6","Type":"ContainerStarted","Data":"1ead538245c8680cce8c10772268cd100641c05c59a7f3f21f6ce7c2c5259963"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.628574 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xxldq" event={"ID":"36555a35-fed5-4cc3-b0d1-940a5de68bfe","Type":"ContainerStarted","Data":"25c9d479ca0106334cea759052e7d0d77a76da67537f498e6d7ef548c70a9d89"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.629871 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" event={"ID":"16d60e4a-c323-46b9-98ac-7ce9e1949e9f","Type":"ContainerStarted","Data":"7ab80dee3b793fd2ea109436c011d139de2806d44536145431b14db20bede582"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.630984 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-pkj67" event={"ID":"58a8bd42-a740-4f79-bed8-87ec3ab27a0d","Type":"ContainerStarted","Data":"7560c7f9a01708cbefc735fb1fd3a7a1ee714d254084f0875f2dbdd47810751c"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.632856 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" event={"ID":"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292","Type":"ContainerStarted","Data":"db5c2704559b19d4db2cf6258a57c22b595038888c22c2ca2099e72f7d0eb012"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.637932 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" event={"ID":"9391167d-2391-4d04-a34a-6ac4aa19518b","Type":"ContainerStarted","Data":"c1cd1d2c49ddd7aaec12a3515280d3c4c9a9c117d8fbabab8d372133e77ef168"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.640114 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" event={"ID":"9d0021ed-bd4b-49bd-a0a1-b175e794697e","Type":"ContainerStarted","Data":"8b8c9a431d3a70d4a4735e68a6ccc9acd4ffa4c573b95f5914d956a3f0f20557"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.641517 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" event={"ID":"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13","Type":"ContainerStarted","Data":"f2acc9ccdb650d79c39509c8d45cac54fb40bd669cf7ebc02d1f2a4692bd344f"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.642840 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" event={"ID":"2c2ef3a5-4e54-4162-bef2-60a40dc206f7","Type":"ContainerStarted","Data":"968e0dd10a59f830af7aeb2cd776ecf37bb256f0f9c5a9323f5ad9b438f31c47"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.643965 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lgl7q" event={"ID":"b853669d-191c-4a38-96c8-290914042f96","Type":"ContainerStarted","Data":"d29a9967e78a1d588ef478ead9563e84154fa71aed63590f862c247d8d9b0440"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.644946 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" event={"ID":"2996d85c-85fd-4462-9f50-b5dea4b5a762","Type":"ContainerStarted","Data":"145768ea0ff6abe442a74982319afa73fa95ff85279a16bc88b937991e5d4ce6"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.647316 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-69fxq" event={"ID":"55a80003-f940-44a9-8f12-55f887816397","Type":"ContainerStarted","Data":"7cbc6576f763815155b227e9ec8e909f2bbe3abd2d34a2456442eb74a09c92b6"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.648737 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" event={"ID":"a21648d7-b40f-4da6-a228-c44b785c788a","Type":"ContainerStarted","Data":"f48875393616bde8da31b5ab23ef55c79065d11752870f05b5fcc2bea0a5f182"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.649685 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" event={"ID":"553e438c-3d4e-4252-9021-a3725ea43f81","Type":"ContainerStarted","Data":"180c83af91921342f4aa10221188986c23e4ac65f49440bdbad5d95540619ab5"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.650611 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" event={"ID":"b4f95734-55e8-4d38-8d4a-51f9876fa4e1","Type":"ContainerStarted","Data":"5017f4beae44cc94b0c37248bc40c08a6d82ce03d1e97a132873f811787502dd"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.651508 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" event={"ID":"93abc37a-cc70-4b86-bd3f-4d3945d029f4","Type":"ContainerStarted","Data":"47140f60476dc82219f84286a584161b211742c17ff76cb165a07c5da59d3b1a"} Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.709009 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.709644 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.209629475 +0000 UTC m=+154.400938174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.810330 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.810687 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.310675038 +0000 UTC m=+154.501983747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.857787 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"] Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.865862 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-flxzr"] Dec 02 14:18:42 crc kubenswrapper[4902]: W1202 14:18:42.871273 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode530e0e8_55f8_4601_b587_2069a7d6fec8.slice/crio-315d5bf01b0c0d0c1540dce0e7cf7fdc2904a3227cd5a6af0bdc82e264332499 WatchSource:0}: Error finding container 315d5bf01b0c0d0c1540dce0e7cf7fdc2904a3227cd5a6af0bdc82e264332499: Status 404 returned error can't find the container with id 315d5bf01b0c0d0c1540dce0e7cf7fdc2904a3227cd5a6af0bdc82e264332499 Dec 02 14:18:42 crc kubenswrapper[4902]: W1202 14:18:42.885947 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29d9646d_bcc6_42a1_b082_286dc0bf9131.slice/crio-f18ec88e9d7fa34a1f658300a47908a0c562bf6d60e35b9b5ae8a23c0d5f9a24 WatchSource:0}: Error finding container f18ec88e9d7fa34a1f658300a47908a0c562bf6d60e35b9b5ae8a23c0d5f9a24: Status 404 returned error can't find the container with id f18ec88e9d7fa34a1f658300a47908a0c562bf6d60e35b9b5ae8a23c0d5f9a24 Dec 02 14:18:42 crc kubenswrapper[4902]: I1202 14:18:42.911431 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:42 crc kubenswrapper[4902]: E1202 14:18:42.912023 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.412004609 +0000 UTC m=+154.603313318 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.017686 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.018815 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.518798647 +0000 UTC m=+154.710107346 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.030854 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xknd4"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.034456 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.036338 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-f4244"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.062639 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.063992 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.069667 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.072845 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6"] Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.080697 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69ffdb33_2a31_4487_bc6c_ea6919c4a5dc.slice/crio-2785af9669204f67dd0f3d9d98a8b3f313db858d465755c280aa695585f51cdd WatchSource:0}: Error finding container 2785af9669204f67dd0f3d9d98a8b3f313db858d465755c280aa695585f51cdd: Status 404 returned error can't find the container with 
id 2785af9669204f67dd0f3d9d98a8b3f313db858d465755c280aa695585f51cdd Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.082428 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl"] Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.084249 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4df04066_9f98_4c4e_88eb_704118e339d3.slice/crio-dbc1adf4114ddf0a04992502a56ff69c36df8b89040ae3185ed393e37bd364ae WatchSource:0}: Error finding container dbc1adf4114ddf0a04992502a56ff69c36df8b89040ae3185ed393e37bd364ae: Status 404 returned error can't find the container with id dbc1adf4114ddf0a04992502a56ff69c36df8b89040ae3185ed393e37bd364ae Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.084303 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-k4csg"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.085917 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw"] Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.096713 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0fd5896_8b33_4fb3_a56c_1e7741ca29df.slice/crio-1231f1433520b6a5e26540e726032f4c8a7fe41d8e8cdf6c0cf36560fe7f1a8c WatchSource:0}: Error finding container 1231f1433520b6a5e26540e726032f4c8a7fe41d8e8cdf6c0cf36560fe7f1a8c: Status 404 returned error can't find the container with id 1231f1433520b6a5e26540e726032f4c8a7fe41d8e8cdf6c0cf36560fe7f1a8c Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.118111 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.118427 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.618402239 +0000 UTC m=+154.809710948 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.118692 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.119267 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.619258083 +0000 UTC m=+154.810566792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.172643 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rgrlp"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.173677 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.219623 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.219767 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.719743361 +0000 UTC m=+154.911052070 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.219883 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.220159 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.720151882 +0000 UTC m=+154.911460591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.321121 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.321382 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.82133919 +0000 UTC m=+155.012647939 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.321530 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.322051 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.822029929 +0000 UTC m=+155.013338678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.423057 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.423371 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:43.92332972 +0000 UTC m=+155.114638479 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.498436 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.503404 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.505362 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gh8sw"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.514423 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.517381 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq"] Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.518232 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce62e788_ddf8_4a4e_ab48_67ca67fa3f6e.slice/crio-59ddbfd06db5f9e4692891f19d87dc171eb7ae34597833f5386d3e135cc5fb0c WatchSource:0}: Error finding container 59ddbfd06db5f9e4692891f19d87dc171eb7ae34597833f5386d3e135cc5fb0c: Status 404 returned error can't find the container with id 59ddbfd06db5f9e4692891f19d87dc171eb7ae34597833f5386d3e135cc5fb0c Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.519163 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qmcz9"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.523905 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn"] Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.524181 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.524611 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.024555779 +0000 UTC m=+155.215864498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.546614 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4db84fcb_a010_4914_9975_9a99aafe6cb2.slice/crio-685bde127153ce2ea7f7024b95fe057d436d18ca6114a401b3434bfebfff4204 WatchSource:0}: Error finding container 685bde127153ce2ea7f7024b95fe057d436d18ca6114a401b3434bfebfff4204: Status 404 returned error can't find the container with id 685bde127153ce2ea7f7024b95fe057d436d18ca6114a401b3434bfebfff4204 Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.548861 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b0ec2e4_cbd7_4fdf_acf6_ebcda99749c8.slice/crio-4f700ada7aec75e62bb74a21a2643a9479f2bc0c1725e9503f7924660d97e161 WatchSource:0}: Error finding container 4f700ada7aec75e62bb74a21a2643a9479f2bc0c1725e9503f7924660d97e161: Status 404 returned error can't find the container with id 4f700ada7aec75e62bb74a21a2643a9479f2bc0c1725e9503f7924660d97e161 Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.624892 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.625089 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.125064506 +0000 UTC m=+155.316373215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.627010 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.627428 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 14:18:44.127415974 +0000 UTC m=+155.318724753 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.656956 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" event={"ID":"67986385-6a35-4b14-b38f-cdee1c9a1b5d","Type":"ContainerStarted","Data":"0665e1fd17f6afc9dc8e00f0106811f795f565300e5f883d08cc1204b32199aa"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.657917 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" event={"ID":"81329550-5c15-4ad3-bc03-49a3c0506f9f","Type":"ContainerStarted","Data":"096d56bb29c2e61f1fac3faeb292901aabfcfc7db445f38b2dde8ba3689f464b"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.660023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" event={"ID":"e530e0e8-55f8-4601-b587-2069a7d6fec8","Type":"ContainerStarted","Data":"315d5bf01b0c0d0c1540dce0e7cf7fdc2904a3227cd5a6af0bdc82e264332499"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.661329 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" event={"ID":"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8","Type":"ContainerStarted","Data":"4f700ada7aec75e62bb74a21a2643a9479f2bc0c1725e9503f7924660d97e161"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.662465 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" event={"ID":"07de0872-46f2-4a69-af4c-a811e7ee3a8d","Type":"ContainerStarted","Data":"377d038142d9137b8d2963b518e8402d7dcb47740ddee46f1634f13b5687ff92"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.664514 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0613855c9e962e5efa654fc9f3460e5f0935f88b0bf0f3001e7bb64064042b14"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.666047 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-f4244" event={"ID":"54c71347-54d5-49cb-b5bb-12b0d607f8ad","Type":"ContainerStarted","Data":"54c13b6106a25f96a452129497e1eb7a0e84251dfc58fdda87ec3b3620669145"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.667365 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" event={"ID":"8f341c23-6370-47df-af45-d654197c980b","Type":"ContainerStarted","Data":"46fc2b26553b51718ac214edb38d17a1c7bb5ae0bee786b223b013c70efb9eb6"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.668596 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" 
event={"ID":"4db84fcb-a010-4914-9975-9a99aafe6cb2","Type":"ContainerStarted","Data":"685bde127153ce2ea7f7024b95fe057d436d18ca6114a401b3434bfebfff4204"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.670094 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" event={"ID":"4df04066-9f98-4c4e-88eb-704118e339d3","Type":"ContainerStarted","Data":"dbc1adf4114ddf0a04992502a56ff69c36df8b89040ae3185ed393e37bd364ae"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.671645 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" event={"ID":"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc","Type":"ContainerStarted","Data":"2785af9669204f67dd0f3d9d98a8b3f313db858d465755c280aa695585f51cdd"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.672599 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" event={"ID":"f0fd5896-8b33-4fb3-a56c-1e7741ca29df","Type":"ContainerStarted","Data":"1231f1433520b6a5e26540e726032f4c8a7fe41d8e8cdf6c0cf36560fe7f1a8c"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.673707 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" event={"ID":"355cfe3d-0577-427d-a6fc-31df4e70e6d5","Type":"ContainerStarted","Data":"d8bce8eb7639c5c3de3b7485217c4d4bbcce1d89de920a5c9b8aa25e80c5f653"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.674737 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-flxzr" event={"ID":"29d9646d-bcc6-42a1-b082-286dc0bf9131","Type":"ContainerStarted","Data":"f18ec88e9d7fa34a1f658300a47908a0c562bf6d60e35b9b5ae8a23c0d5f9a24"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.675764 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" event={"ID":"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e","Type":"ContainerStarted","Data":"59ddbfd06db5f9e4692891f19d87dc171eb7ae34597833f5386d3e135cc5fb0c"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.676691 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xknd4" event={"ID":"326be887-d9c0-41bd-9eec-2c753701d197","Type":"ContainerStarted","Data":"e8ca3f3de3b1dc37506cf266cfc8b3480ff3d8bc282940789f20be7da8bdd656"} Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.728717 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.728949 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.22892157 +0000 UTC m=+155.420230289 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.729133 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.729544 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.229525017 +0000 UTC m=+155.420833766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.830169 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.830378 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.330349764 +0000 UTC m=+155.521658473 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.830515 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.830851 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:44.330842278 +0000 UTC m=+155.522150987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.916910 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9768e394_e777_4957_9c84_1a1c0e70cc9f.slice/crio-d20bfa95c4930f676a10d10a61cc88a545f281af05e1c0a8bdc42233d175e8b5 WatchSource:0}: Error finding container d20bfa95c4930f676a10d10a61cc88a545f281af05e1c0a8bdc42233d175e8b5: Status 404 returned error can't find the container with id d20bfa95c4930f676a10d10a61cc88a545f281af05e1c0a8bdc42233d175e8b5 Dec 02 14:18:43 crc kubenswrapper[4902]: W1202 14:18:43.923614 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod009e1183_d309_4988_b20c_54c056613558.slice/crio-d20326ebe7eb01d35afbe916c0e3fb59d56949243ce12076e30683d9553669e6 WatchSource:0}: Error finding container d20326ebe7eb01d35afbe916c0e3fb59d56949243ce12076e30683d9553669e6: Status 404 returned error can't find the container with id d20326ebe7eb01d35afbe916c0e3fb59d56949243ce12076e30683d9553669e6 Dec 02 14:18:43 crc kubenswrapper[4902]: I1202 14:18:43.933083 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:43 crc kubenswrapper[4902]: E1202 14:18:43.933291 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
[... repeated UnmountVolume.TearDown / MountVolume.MountDevice retry records for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" elided; the 500ms retry loop continued while driver kubevirt.io.hostpath-provisioner remained unregistered ...]
Dec 02 14:18:44 crc kubenswrapper[4902]: I1202 14:18:44.682399 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" event={"ID":"009e1183-d309-4988-b20c-54c056613558","Type":"ContainerStarted","Data":"d20326ebe7eb01d35afbe916c0e3fb59d56949243ce12076e30683d9553669e6"}
Dec 02 14:18:44 crc kubenswrapper[4902]: I1202 14:18:44.683494 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" event={"ID":"9768e394-e777-4957-9c84-1a1c0e70cc9f","Type":"ContainerStarted","Data":"d20bfa95c4930f676a10d10a61cc88a545f281af05e1c0a8bdc42233d175e8b5"}
Dec 02 14:18:44 crc kubenswrapper[4902]: I1202 14:18:44.684368 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" event={"ID":"30d9c634-f04e-4333-bdc6-083b2fdf5e14","Type":"ContainerStarted","Data":"267dc634e398ef6530b5696593acbfb0c78295e0133315b42f682ca9f1d45655"}
Dec 02 14:18:44 crc kubenswrapper[4902]: I1202 14:18:44.686203 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" event={"ID":"b96aa7be-f972-4bd7-bd39-2d0a2017101c","Type":"ContainerStarted","Data":"8b5bca00e595dc9fdbf1782a0520eaa08381272f121debf1337e4617debcd29a"}
Dec 02 14:18:44 crc kubenswrapper[4902]: I1202 14:18:44.687117 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" event={"ID":"c5059510-c9f9-4945-8964-b74b62a6352d","Type":"ContainerStarted","Data":"2cc0a59f00aa7a2e52c8dbbdbc31f86db530467eee7a752296bd1402be8a662e"}
[... repeated UnmountVolume.TearDown / MountVolume.MountDevice retry records for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" elided; the 500ms retry loop continued while driver kubevirt.io.hostpath-provisioner remained unregistered ...]
Dec 02 14:18:45 crc kubenswrapper[4902]: I1202 14:18:45.709851 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" event={"ID":"2c2ef3a5-4e54-4162-bef2-60a40dc206f7","Type":"ContainerStarted","Data":"66cf2ec1e627b2e658c87fedb836ec8cc42730fe3d73512d5c040c4291e3e1fc"}
Dec 02 14:18:45 crc kubenswrapper[4902]: I1202 14:18:45.712167 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aca0f5c3425f986f2acf2b801a56ed188094f0f91648268adc52de22a63a934a"}
[... repeated UnmountVolume.TearDown / MountVolume.MountDevice retry records for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" elided; the 500ms retry loop continued while driver kubevirt.io.hostpath-provisioner remained unregistered ...]
Dec 02 14:18:46 crc kubenswrapper[4902]: I1202 14:18:46.719807 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8bcjv" event={"ID":"e7263104-6357-4d41-a133-faf27fb96fd4","Type":"ContainerStarted","Data":"ed5c0720557b37cc99e12d767b4387172f8bbc6edba471c9717c6f1c1cb17251"}
Dec 02 14:18:46 crc kubenswrapper[4902]: I1202 14:18:46.722034 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" event={"ID":"d0119aeb-1cd1-4503-ba6d-062f07f01491","Type":"ContainerStarted","Data":"440c41e10328d39b4cf48f7f2e8f6545c2a880b59976036258d6ecf8d5a40fe8"}
[identical retry records omitted: the MountVolume.MountDevice failure for pod "image-registry-697d97f7c8-2sdv9" and the UnmountVolume.TearDown failure for pod UID "8f668bae-612b-4b75-9490-919e737c6a3b" repeat unchanged at roughly 100 ms intervals from 14:18:46.782683 to 14:18:47.702534, each logging the same "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" error and deferring the retry by a further 500 ms]
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.702692 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:47 crc kubenswrapper[4902]: E1202 14:18:47.703158 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:48.20312102 +0000 UTC m=+159.394429839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.728199 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" event={"ID":"93abc37a-cc70-4b86-bd3f-4d3945d029f4","Type":"ContainerStarted","Data":"1b13026644ff221069058e761e322813cfdd447ab43adebdf6a3e6fc963619f0"}
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.730284 4902 generic.go:334] "Generic (PLEG): container finished" podID="90af0e3f-43b4-4349-933b-1d9c22b76438" containerID="b829019ca110c297be9a5450031fb7276767805c047c5a021d5cbed705f6f475" exitCode=0
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.730376 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" event={"ID":"90af0e3f-43b4-4349-933b-1d9c22b76438","Type":"ContainerDied","Data":"b829019ca110c297be9a5450031fb7276767805c047c5a021d5cbed705f6f475"}
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.732403 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" event={"ID":"f61a29cb-55b4-42b7-817f-67600cb70980","Type":"ContainerStarted","Data":"0d2437a5f93079b036c105e29106620a3c6bdd840882fbc69ee6c2eb9290f441"}
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.805500 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:47 crc kubenswrapper[4902]: E1202 14:18:47.805684 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:48.305645225 +0000 UTC m=+159.496953934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.805819 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:47 crc kubenswrapper[4902]: E1202 14:18:47.806127 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:48.306119699 +0000 UTC m=+159.497428408 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:47 crc kubenswrapper[4902]: I1202 14:18:47.907163 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:47 crc kubenswrapper[4902]: E1202 14:18:47.907464 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:48.407427709 +0000 UTC m=+159.598736448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
[identical retry records omitted: the same MountVolume.MountDevice / UnmountVolume.TearDown failure pair recurs at roughly 100 ms intervals from 14:18:48.009355 to 14:18:48.723460 with no change in the error text or the 500 ms retry deferral]
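[editor's note] The cadence in the omitted spans is the volume reconciler re-attempting on every sync pass (about 100 ms apart here) while each failure stamps the operation with an earliest-retry deadline 500 ms out, as logged by nestedpendingoperations.go. A toy sketch of that fail-then-defer pattern follows; illustrative only, not the kubelet's actual code (the real pending-operations table also de-duplicates in-flight operations and grows the backoff exponentially):

    package main

    import (
    	"fmt"
    	"time"
    )

    // retryGate mimics one entry in a pending-operations table: the
    // operation may not be retried before deadline.
    type retryGate struct {
    	deadline time.Time
    	backoff  time.Duration
    }

    func (g *retryGate) tryRun(now time.Time, op func() error) {
    	if now.Before(g.deadline) {
    		return // "No retries permitted until <deadline>"
    	}
    	if err := op(); err != nil {
    		g.deadline = now.Add(g.backoff) // defer the next attempt
    		fmt.Printf("failed (%v); no retries until %s (durationBeforeRetry %s)\n",
    			err, g.deadline.Format(time.RFC3339Nano), g.backoff)
    	}
    }

    func main() {
    	gate := &retryGate{backoff: 500 * time.Millisecond}
    	mount := func() error { return fmt.Errorf("driver not registered") }

    	// A ~100 ms reconcile loop, as in the log: most ticks are gated
    	// off, and a real (failing) attempt runs roughly every 500 ms.
    	for i := 0; i < 20; i++ {
    		gate.tryRun(time.Now(), mount)
    		time.Sleep(100 * time.Millisecond)
    	}
    }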
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.737615 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" event={"ID":"16d60e4a-c323-46b9-98ac-7ce9e1949e9f","Type":"ContainerStarted","Data":"06a3f30514550a489c50bfed3c558e5f4652d8006a6d653741857cedf262938e"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.739160 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" event={"ID":"69ffdb33-2a31-4487-bc6c-ea6919c4a5dc","Type":"ContainerStarted","Data":"4d648ac704120b1f798361cba098c61b0b94d8a94e6e10fd7a5d5222ea3f88ee"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.740549 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" event={"ID":"9d0021ed-bd4b-49bd-a0a1-b175e794697e","Type":"ContainerStarted","Data":"44dde097492a04d8e8c3d6a91516823f8950162301107fb51fd2423a464024aa"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.741728 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" event={"ID":"8b4b4ce5-869d-4047-b1d5-b4365ded3549","Type":"ContainerStarted","Data":"51162f29b96b22e459fc98ffef248a64de118e9c04540ba9ae34b96af36b7229"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.743080 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-69fxq" event={"ID":"55a80003-f940-44a9-8f12-55f887816397","Type":"ContainerStarted","Data":"d4e657538fb0a8e645f558c3be95bc34d84045ddba7278aa27eea32467cf5480"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.744420 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" event={"ID":"6244913f-facc-4fe2-be8c-09238b2704d6","Type":"ContainerStarted","Data":"7e9bdaf49bbb782053efa12c6f725ec71513a1856b581c37b2bd94ca86c7cbdd"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.745744 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" event={"ID":"a21648d7-b40f-4da6-a228-c44b785c788a","Type":"ContainerStarted","Data":"afcb95fbed2788bf7b9f7f08aa803e0d1f9934da7ddb801e75e0953f214f7e90"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.747081 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" event={"ID":"2996d85c-85fd-4462-9f50-b5dea4b5a762","Type":"ContainerStarted","Data":"9604850ff7968b528538bb723d6abdc75b4bb85133e9e2cec2e1d1480e56bd81"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.748552 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xknd4" event={"ID":"326be887-d9c0-41bd-9eec-2c753701d197","Type":"ContainerStarted","Data":"6a93e04981357c5c3a77232ec17fb59f7cd0ea4e7a36ff8b727cb98bacd7650c"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.750278 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" event={"ID":"b96aa7be-f972-4bd7-bd39-2d0a2017101c","Type":"ContainerStarted","Data":"897232053f2f9391065f04aaeb23662a058e5b602f1df3f6dc8f12e1ac521e2f"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.751399 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" event={"ID":"c5059510-c9f9-4945-8964-b74b62a6352d","Type":"ContainerStarted","Data":"aba92fd5c955a2cab29276e67ea475ddd56913795cfb342dc1f00a414d5af05d"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.755052 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" event={"ID":"30d9c634-f04e-4333-bdc6-083b2fdf5e14","Type":"ContainerStarted","Data":"035fa92211b78140c066ff96c5b186fadb2574d76f1505da8705f1fa5de4dbee"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.756323 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" event={"ID":"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13","Type":"ContainerStarted","Data":"aa2ee3f2bdb1303efe769b8240391af3cc2a574cdfb2fe49bf54792bd24d52f2"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.757676 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" event={"ID":"4df04066-9f98-4c4e-88eb-704118e339d3","Type":"ContainerStarted","Data":"90de29a96f83f1e6bf833e20a62757d357d0eedbf300fb79886ff735855d61bc"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.759159 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" event={"ID":"b4f95734-55e8-4d38-8d4a-51f9876fa4e1","Type":"ContainerStarted","Data":"7ef94a49c2a44e0eb1cb1cae72f8154d811440908f3702ac5f187a9aeba797ce"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.761180 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xxldq" event={"ID":"36555a35-fed5-4cc3-b0d1-940a5de68bfe","Type":"ContainerStarted","Data":"a985be18e7665727c7da514c1480cf806dbaedbd832a237226a0a773330dca5e"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.763186 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" event={"ID":"009e1183-d309-4988-b20c-54c056613558","Type":"ContainerStarted","Data":"bd82e8e7456dd61aae0d1a95226c70d19f5f97300fcb0eb992c4898f3f807ad0"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.765033 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" event={"ID":"553e438c-3d4e-4252-9021-a3725ea43f81","Type":"ContainerStarted","Data":"f504eb4ea33a0cacd5f5df16ffebda9dc2cd7e7515e072a37959ba58ee947294"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.766411 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" event={"ID":"e530e0e8-55f8-4601-b587-2069a7d6fec8","Type":"ContainerStarted","Data":"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.767905 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-pkj67" event={"ID":"58a8bd42-a740-4f79-bed8-87ec3ab27a0d","Type":"ContainerStarted","Data":"be4427441e2ed2cfda96f70315f223b925a7c588175c9d0444899b0bb2eef52e"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.769803 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lgl7q" event={"ID":"b853669d-191c-4a38-96c8-290914042f96","Type":"ContainerStarted","Data":"30b730363d3d4d7a6ab7ed7e0e864d25cd760f273dacf6ed3a775b3a956fa8c5"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.771130 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" event={"ID":"9cb8cc94-1d13-4c57-9b12-bdd0ca3ba292","Type":"ContainerStarted","Data":"1da99a51c70100d91524b108fccbbd4b44c10e5025eda71a41409fa53cca471b"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.772775 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-flxzr" event={"ID":"29d9646d-bcc6-42a1-b082-286dc0bf9131","Type":"ContainerStarted","Data":"8e51ec7499affbd7de3e1d31bda4f2f55ba419618f5b1754a708c44409dfdef8"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.774261 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" event={"ID":"9391167d-2391-4d04-a34a-6ac4aa19518b","Type":"ContainerStarted","Data":"eada562119ab6a6e1261910c0aa8eb47ed80737d997aed374f5ff87aeec20da1"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.775599 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" event={"ID":"81329550-5c15-4ad3-bc03-49a3c0506f9f","Type":"ContainerStarted","Data":"04e169a4e5b8eb458d828493e871ecbe00c0bec372d9b69609b2531d8762c80e"}
Dec 02 14:18:48 crc kubenswrapper[4902]: I1202 14:18:48.824530 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:48 crc kubenswrapper[4902]: E1202 14:18:48.825081 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:49.325009512 +0000 UTC m=+160.516318261 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
[identical retry records omitted: the UnmountVolume.TearDown / MountVolume.MountDevice failure pair continues unchanged at roughly 100 ms intervals from 14:18:48.925624 to 14:18:49.737518, still reporting "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" and deferring each retry by 500 ms; at the end of this span the driver has still not registered]
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.795044 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" event={"ID":"ce62e788-ddf8-4a4e-ab48-67ca67fa3f6e","Type":"ContainerStarted","Data":"1a1517a51afad2347e8b128c7ffb091f308356f43c695fb45d74a76e0fcf035c"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.795877 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" event={"ID":"8f341c23-6370-47df-af45-d654197c980b","Type":"ContainerStarted","Data":"ba1424f52375d5bae374e0b9cec311f54015031c6a071051c2c432628a476c2e"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.797143 4902 generic.go:334] "Generic (PLEG): container finished" podID="9768e394-e777-4957-9c84-1a1c0e70cc9f" containerID="3e636f48182b7c89ac7e2989cc273f67d29aa8c0bcec11d1add7f81800fc0be8" exitCode=0 Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.797194 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" event={"ID":"9768e394-e777-4957-9c84-1a1c0e70cc9f","Type":"ContainerDied","Data":"3e636f48182b7c89ac7e2989cc273f67d29aa8c0bcec11d1add7f81800fc0be8"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.811271 4902 generic.go:334] "Generic (PLEG): container finished" podID="f0fd5896-8b33-4fb3-a56c-1e7741ca29df" containerID="77d22bf2473fac92b49615679419ed1dbc1035ef0ecda6bfcb3ce589cef49b16" exitCode=0 Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.811355 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" event={"ID":"f0fd5896-8b33-4fb3-a56c-1e7741ca29df","Type":"ContainerDied","Data":"77d22bf2473fac92b49615679419ed1dbc1035ef0ecda6bfcb3ce589cef49b16"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.816809 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" event={"ID":"3b0ec2e4-cbd7-4fdf-acf6-ebcda99749c8","Type":"ContainerStarted","Data":"51cdc12a8ede5ac1a5aab56a2781d0d92ea9c622707dd34b40363a4a207f2585"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.835936 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" event={"ID":"355cfe3d-0577-427d-a6fc-31df4e70e6d5","Type":"ContainerStarted","Data":"f750361acfdad8c70d387b75a660292c98310cbfe8e9a260206849e51b173475"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.839992 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:49 crc kubenswrapper[4902]: E1202 14:18:49.840120 4902 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.340100367 +0000 UTC m=+161.531409076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.840283 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:49 crc kubenswrapper[4902]: E1202 14:18:49.840740 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.340731725 +0000 UTC m=+161.532040434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.848729 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" event={"ID":"4db84fcb-a010-4914-9975-9a99aafe6cb2","Type":"ContainerStarted","Data":"b2db2e77bfb1d2de23876dbc700d446df6b88fd9cd1e974f651655ca82b0e51c"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.858533 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-f4244" event={"ID":"54c71347-54d5-49cb-b5bb-12b0d607f8ad","Type":"ContainerStarted","Data":"fd0380f1c331de60f9486ba59c860f35b5aeac414e2aac6412382ee8c9f86af4"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.860647 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" event={"ID":"67986385-6a35-4b14-b38f-cdee1c9a1b5d","Type":"ContainerStarted","Data":"51976c42e21c540b8d5892cae667079dd2601f1ddb8f4ba3af515a93c50b05a1"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.861635 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" event={"ID":"07de0872-46f2-4a69-af4c-a811e7ee3a8d","Type":"ContainerStarted","Data":"c3df9dbdb6b29887ec7e83c39d19c0f536e07ec112824c4ae40dc3029bed568f"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.863810 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" event={"ID":"8b4b4ce5-869d-4047-b1d5-b4365ded3549","Type":"ContainerStarted","Data":"41864ee92aaf25bf59f2635046c5890d42a7a673593ae485abac3c350387fe11"} Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.865973 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-pkj67" Dec 02 14:18:49 crc kubenswrapper[4902]: I1202 14:18:49.941191 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:49 crc kubenswrapper[4902]: E1202 14:18:49.942188 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.442168349 +0000 UTC m=+161.633477058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.043526 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.044028 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.544009785 +0000 UTC m=+161.735318494 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.049522 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kqkfj" podStartSLOduration=138.049502892 podStartE2EDuration="2m18.049502892s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.045187389 +0000 UTC m=+161.236496098" watchObservedRunningTime="2025-12-02 14:18:50.049502892 +0000 UTC m=+161.240811601" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.076156 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" podStartSLOduration=138.076136722 podStartE2EDuration="2m18.076136722s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.074707421 +0000 UTC m=+161.266016130" watchObservedRunningTime="2025-12-02 14:18:50.076136722 +0000 UTC m=+161.267445431" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.103771 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xp79t" podStartSLOduration=138.10375418 podStartE2EDuration="2m18.10375418s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.101261799 +0000 UTC m=+161.292570508" watchObservedRunningTime="2025-12-02 14:18:50.10375418 +0000 UTC m=+161.295062889" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.134192 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8bcjv" podStartSLOduration=139.134174988 podStartE2EDuration="2m19.134174988s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.132305225 +0000 UTC m=+161.323613934" watchObservedRunningTime="2025-12-02 14:18:50.134174988 +0000 UTC m=+161.325483697" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.145683 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.145889 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.645865852 +0000 UTC m=+161.837174561 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.146204 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.146535 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.646522621 +0000 UTC m=+161.837831330 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.193604 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" podStartSLOduration=138.193585754 podStartE2EDuration="2m18.193585754s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.193063839 +0000 UTC m=+161.384372548" watchObservedRunningTime="2025-12-02 14:18:50.193585754 +0000 UTC m=+161.384894463" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.216097 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" podStartSLOduration=139.216080756 podStartE2EDuration="2m19.216080756s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.215216081 +0000 UTC m=+161.406524790" watchObservedRunningTime="2025-12-02 14:18:50.216080756 +0000 UTC m=+161.407389465" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.247481 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.247689 4902 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.747659567 +0000 UTC m=+161.938968276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.247729 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.248049 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.748041748 +0000 UTC m=+161.939350457 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.270792 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-v5x6s" podStartSLOduration=139.270775497 podStartE2EDuration="2m19.270775497s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.269095649 +0000 UTC m=+161.460404358" watchObservedRunningTime="2025-12-02 14:18:50.270775497 +0000 UTC m=+161.462084206" Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.348624 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.348964 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.848938847 +0000 UTC m=+162.040247556 (durationBeforeRetry 500ms). 
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.383578 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-pkj67" podStartSLOduration=139.383543354 podStartE2EDuration="2m19.383543354s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.32522063 +0000 UTC m=+161.516529339" watchObservedRunningTime="2025-12-02 14:18:50.383543354 +0000 UTC m=+161.574852063"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.384505 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wkjp6" podStartSLOduration=138.384499402 podStartE2EDuration="2m18.384499402s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.381655761 +0000 UTC m=+161.572964470" watchObservedRunningTime="2025-12-02 14:18:50.384499402 +0000 UTC m=+161.575808111"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.450962 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.451250 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:50.951238766 +0000 UTC m=+162.142547465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.551798 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.552100 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.052077244 +0000 UTC m=+162.243385953 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.553014 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.553317 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.053308629 +0000 UTC m=+162.244617338 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.654550 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.654938 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.154923928 +0000 UTC m=+162.346232637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.725199 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-xxldq" podStartSLOduration=139.725180293 podStartE2EDuration="2m19.725180293s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.430484634 +0000 UTC m=+161.621793343" watchObservedRunningTime="2025-12-02 14:18:50.725180293 +0000 UTC m=+161.916488992"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.755901 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.756233 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.256221929 +0000 UTC m=+162.447530628 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.785804 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-69fxq" podStartSLOduration=17.785786242 podStartE2EDuration="17.785786242s" podCreationTimestamp="2025-12-02 14:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.78534129 +0000 UTC m=+161.976649999" watchObservedRunningTime="2025-12-02 14:18:50.785786242 +0000 UTC m=+161.977094951"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.860547 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.861202 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.361185954 +0000 UTC m=+162.552494663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.864496 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"]
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.865395 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.868214 4902 patch_prober.go:28] interesting pod/console-operator-58897d9998-pkj67 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.868263 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-pkj67" podUID="58a8bd42-a740-4f79-bed8-87ec3ab27a0d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.872594 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.902121 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"]
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.905911 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.950255 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" podStartSLOduration=139.950206434 podStartE2EDuration="2m19.950206434s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.931909402 +0000 UTC m=+162.123218111" watchObservedRunningTime="2025-12-02 14:18:50.950206434 +0000 UTC m=+162.141515163"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.966092 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.966696 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.966752 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f479k\" (UniqueName: \"kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.967337 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:50 crc kubenswrapper[4902]: E1202 14:18:50.976029 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.476013871 +0000 UTC m=+162.667322570 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:50 crc kubenswrapper[4902]: I1202 14:18:50.982225 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ks9ht" podStartSLOduration=139.982208417 podStartE2EDuration="2m19.982208417s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:50.9553242 +0000 UTC m=+162.146632909" watchObservedRunningTime="2025-12-02 14:18:50.982208417 +0000 UTC m=+162.173517126"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.037016 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-pkj67"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.051685 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.051880 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rxhgz" podStartSLOduration=139.051869245 podStartE2EDuration="2m19.051869245s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.016043353 +0000 UTC m=+162.207352062" watchObservedRunningTime="2025-12-02 14:18:51.051869245 +0000 UTC m=+162.243177954"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.056985 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lzbvn" podStartSLOduration=140.056967931 podStartE2EDuration="2m20.056967931s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.051449143 +0000 UTC m=+162.242757852" watchObservedRunningTime="2025-12-02 14:18:51.056967931 +0000 UTC m=+162.248276640"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.057467 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cg77b"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.058333 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.069714 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg77b"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.069912 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.077336 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.077548 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.079326 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.579302238 +0000 UTC m=+162.770610947 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.102490 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.102543 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.102573 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f479k\" (UniqueName: \"kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.104480 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.604466436 +0000 UTC m=+162.795775145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.079929 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.105135 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.131966 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpl42" podStartSLOduration=140.13194947 podStartE2EDuration="2m20.13194947s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.079468063 +0000 UTC m=+162.270776772" watchObservedRunningTime="2025-12-02 14:18:51.13194947 +0000 UTC m=+162.323258179"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.170442 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-6n55q" podStartSLOduration=139.170428208 podStartE2EDuration="2m19.170428208s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.168863844 +0000 UTC m=+162.360172553" watchObservedRunningTime="2025-12-02 14:18:51.170428208 +0000 UTC m=+162.361736917"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.202818 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f479k\" (UniqueName: \"kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k\") pod \"certified-operators-4wsv4\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.203331 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.203671 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.203691 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.203749 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crjqv\" (UniqueName: \"kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.203865 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.703850592 +0000 UTC m=+162.895159301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.204975 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.206685 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.210642 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-xknd4" podStartSLOduration=17.210612485 podStartE2EDuration="17.210612485s" podCreationTimestamp="2025-12-02 14:18:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.199839037 +0000 UTC m=+162.391147746" watchObservedRunningTime="2025-12-02 14:18:51.210612485 +0000 UTC m=+162.401921194"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.218130 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.218986 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.221297 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.246494 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-77jdb"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.247330 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77jdb"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305234 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305272 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crjqv\" (UniqueName: \"kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305322 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305364 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305380 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.305796 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.306185 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.806171532 +0000 UTC m=+162.997480241 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.306805 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.327832 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wsv4"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.350996 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" podStartSLOduration=140.350957949 podStartE2EDuration="2m20.350957949s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.27631207 +0000 UTC m=+162.467620779" watchObservedRunningTime="2025-12-02 14:18:51.350957949 +0000 UTC m=+162.542266658"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.353353 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-77jdb"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.358517 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rgrlp" podStartSLOduration=139.358497924 podStartE2EDuration="2m19.358497924s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.351762981 +0000 UTC m=+162.543071690" watchObservedRunningTime="2025-12-02 14:18:51.358497924 +0000 UTC m=+162.549806633"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.393269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crjqv\" (UniqueName: \"kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv\") pod \"community-operators-cg77b\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414352 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414657 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414696 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414754 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gxcd\" (UniqueName: \"kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414807 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.414825 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb"
Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.414957 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:51.914927834 +0000 UTC m=+163.106236543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.415340 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.451851 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" podStartSLOduration=139.451834317 podStartE2EDuration="2m19.451834317s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.449391487 +0000 UTC m=+162.640700196" watchObservedRunningTime="2025-12-02 14:18:51.451834317 +0000 UTC m=+162.643143026"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.479991 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg77b"
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.497898 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6shgl"]
Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.499042 4902 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.516605 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.516645 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.516666 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.516742 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gxcd\" (UniqueName: \"kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.516917 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.016902643 +0000 UTC m=+163.208211342 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.517301 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.522919 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.556366 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.589473 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" podStartSLOduration=139.589457884 podStartE2EDuration="2m19.589457884s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.567297122 +0000 UTC m=+162.758605831" watchObservedRunningTime="2025-12-02 14:18:51.589457884 +0000 UTC m=+162.780766593" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.591692 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6shgl"] Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.602816 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.617769 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.617984 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqjlv\" (UniqueName: \"kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.618011 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.618042 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.618212 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.118195584 +0000 UTC m=+163.309504303 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.623276 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gxcd\" (UniqueName: \"kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd\") pod \"certified-operators-77jdb\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.652827 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.689873 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-szb48" podStartSLOduration=139.689856899 podStartE2EDuration="2m19.689856899s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.649266271 +0000 UTC m=+162.840574980" watchObservedRunningTime="2025-12-02 14:18:51.689856899 +0000 UTC m=+162.881165608" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.720616 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.720690 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.720742 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqjlv\" (UniqueName: \"kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.720767 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.721157 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.721370 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.721624 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.221612825 +0000 UTC m=+163.412921534 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.722327 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.732743 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" podStartSLOduration=139.732724822 podStartE2EDuration="2m19.732724822s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.690399644 +0000 UTC m=+162.881708353" watchObservedRunningTime="2025-12-02 14:18:51.732724822 +0000 UTC m=+162.924033531" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.757273 4902 patch_prober.go:28] interesting pod/downloads-7954f5f757-xxldq container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.757313 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xxldq" podUID="36555a35-fed5-4cc3-b0d1-940a5de68bfe" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.758918 4902 patch_prober.go:28] interesting pod/downloads-7954f5f757-xxldq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.758967 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xxldq" podUID="36555a35-fed5-4cc3-b0d1-940a5de68bfe" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.759262 4902 patch_prober.go:28] interesting pod/downloads-7954f5f757-xxldq container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.759278 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xxldq" podUID="36555a35-fed5-4cc3-b0d1-940a5de68bfe" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.774295 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqjlv\" (UniqueName: 
\"kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv\") pod \"community-operators-6shgl\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.806431 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-lgl7q" podStartSLOduration=139.806410985 podStartE2EDuration="2m19.806410985s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.743962973 +0000 UTC m=+162.935271702" watchObservedRunningTime="2025-12-02 14:18:51.806410985 +0000 UTC m=+162.997719694" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.816631 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-gh8sw" podStartSLOduration=140.816612976 podStartE2EDuration="2m20.816612976s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:51.815283668 +0000 UTC m=+163.006592377" watchObservedRunningTime="2025-12-02 14:18:51.816612976 +0000 UTC m=+163.007921685" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.816682 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.817353 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.825376 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.826251 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.3262356 +0000 UTC m=+163.517544309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.838597 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:51 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:51 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:51 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.838664 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.849636 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.866878 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.871030 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.878612 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.879979 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.894003 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.909515 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tskrw" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.912494 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z86lr" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.914838 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-682gl" Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.926520 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:51 crc kubenswrapper[4902]: E1202 14:18:51.926801 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.42678964 +0000 UTC m=+163.618098349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:51 crc kubenswrapper[4902]: I1202 14:18:51.960282 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" event={"ID":"93abc37a-cc70-4b86-bd3f-4d3945d029f4","Type":"ContainerStarted","Data":"727990651da20df9625403fd63ff006e80088f6124383138560f4f6768754e96"} Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.030356 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.031031 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" event={"ID":"b96aa7be-f972-4bd7-bd39-2d0a2017101c","Type":"ContainerStarted","Data":"00e5ce005baf9bafba1d51e3aad7571fa5fe7426fa45285eb22a0dec03fd2d4a"} Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.031810 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.531795366 +0000 UTC m=+163.723104075 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.111668 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"] Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.132434 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.134172 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.634159707 +0000 UTC m=+163.825468416 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.233229 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.233775 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.733745888 +0000 UTC m=+163.925054597 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.252219 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg77b"] Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.334484 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.335156 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.835144492 +0000 UTC m=+164.026453201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.356166 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-77jdb"] Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.436071 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.436376 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:52.93636168 +0000 UTC m=+164.127670389 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.514018 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6shgl"] Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.537130 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.537529 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.037512676 +0000 UTC m=+164.228821385 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.638211 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.638834 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.138802247 +0000 UTC m=+164.330110956 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.638947 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.639313 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.139296681 +0000 UTC m=+164.330605390 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.673188 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 14:18:52 crc kubenswrapper[4902]: W1202 14:18:52.681990 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc22d338e_0daa_47be_a885_15aac0339cf4.slice/crio-2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822 WatchSource:0}: Error finding container 2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822: Status 404 returned error can't find the container with id 2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822 Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.740016 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.740223 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.24019621 +0000 UTC m=+164.431504919 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.740311 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.740629 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.240617192 +0000 UTC m=+164.431925901 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.824634 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:52 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:52 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:52 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.824692 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.841665 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.842059 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.342045246 +0000 UTC m=+164.533353955 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:52 crc kubenswrapper[4902]: I1202 14:18:52.943995 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:52 crc kubenswrapper[4902]: E1202 14:18:52.944441 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.444426008 +0000 UTC m=+164.635734717 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.044795 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.045133 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.545117611 +0000 UTC m=+164.736426320 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.045719 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"] Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.046838 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.049800 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.052031 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerStarted","Data":"01274fcf0cdf9ca5514b1e7bcfd1511be0c3904e921aa6482f1301da0a6bd104"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.052068 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerStarted","Data":"ba75cc2e9ce88ba42bc89f33f32fe6c7b20a1c03cc42b021d0cc86653e0aa51f"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.057099 4902 generic.go:334] "Generic (PLEG): container finished" podID="c5059510-c9f9-4945-8964-b74b62a6352d" containerID="aba92fd5c955a2cab29276e67ea475ddd56913795cfb342dc1f00a414d5af05d" exitCode=0 Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.057188 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" event={"ID":"c5059510-c9f9-4945-8964-b74b62a6352d","Type":"ContainerDied","Data":"aba92fd5c955a2cab29276e67ea475ddd56913795cfb342dc1f00a414d5af05d"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.059321 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" event={"ID":"67986385-6a35-4b14-b38f-cdee1c9a1b5d","Type":"ContainerStarted","Data":"6a7024695f79c3935deb7acdb773312f0eeeee8c9b836669e2a9d2520156de03"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.059360 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.069610 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"] Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.071975 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" event={"ID":"e5dcbc97-fe74-4313-8205-4b8fd0eb2c13","Type":"ContainerStarted","Data":"64d6da1f0b66c0da5c66d891427edf6f40ba8998ccd7b8a9b4795b19dea5d734"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.074732 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" event={"ID":"90af0e3f-43b4-4349-933b-1d9c22b76438","Type":"ContainerStarted","Data":"e7ac106c51d76fcd2edf068e926930dd5f838d6e7c11b15d0f62818c83377b5b"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.089838 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" event={"ID":"4db84fcb-a010-4914-9975-9a99aafe6cb2","Type":"ContainerStarted","Data":"d5e51c1d9ae2cf9c9bb9b5191c9f069b0676f30de4458e76f48b14a8a70509e1"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.098079 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" 
event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerStarted","Data":"28a76e61b8cd97e5f86591f6fb5db562554c9d1394be8de648acbaf9760bb152"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.098109 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerStarted","Data":"650fa069a575212a3364e069f763ec0606435103a3a7f509f0f550595f5645b6"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.125650 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz" podStartSLOduration=141.125633438 podStartE2EDuration="2m21.125633438s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.125066762 +0000 UTC m=+164.316375471" watchObservedRunningTime="2025-12-02 14:18:53.125633438 +0000 UTC m=+164.316942147" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.128820 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-flxzr" event={"ID":"29d9646d-bcc6-42a1-b082-286dc0bf9131","Type":"ContainerStarted","Data":"07caded21c4bacf1e029bc65717ce7e373517324d28395ad23ba423482b443c4"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.128916 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.146125 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgq8p\" (UniqueName: \"kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.146186 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.146236 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.146268 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.146690 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 14:18:53.646675189 +0000 UTC m=+164.837983978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.155625 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" event={"ID":"9768e394-e777-4957-9c84-1a1c0e70cc9f","Type":"ContainerStarted","Data":"d0d3913618478ec06d121fa620747afc7b0b172eb397c2b4cdc72a1753680343"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.164981 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" event={"ID":"30d9c634-f04e-4333-bdc6-083b2fdf5e14","Type":"ContainerStarted","Data":"13a87cfda721b73d01cdd4a8b2fc083c12b666e446e44ecac7fd49908c30f295"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.167152 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ppjnj" podStartSLOduration=141.167133992 podStartE2EDuration="2m21.167133992s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.164797386 +0000 UTC m=+164.356106095" watchObservedRunningTime="2025-12-02 14:18:53.167133992 +0000 UTC m=+164.358442701" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.181353 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerStarted","Data":"7b2a1de65fadda7cc117c3bf959c2507a77c974e3c6b2537edd26f4f51671d73"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.181410 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerStarted","Data":"4c8547cac2dae081991bc5c732b2c4e48b25511e7cc6e6f725279ffa4279f296"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.188046 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-flxzr" podStartSLOduration=19.188030399 podStartE2EDuration="19.188030399s" podCreationTimestamp="2025-12-02 14:18:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.186000811 +0000 UTC m=+164.377309550" watchObservedRunningTime="2025-12-02 14:18:53.188030399 +0000 UTC m=+164.379339108" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.193090 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerStarted","Data":"b3b764d182c126246007b2677b38c27d36932dbfa3bde3a68b497a535f6a7d31"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.193145 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerStarted","Data":"dfb9ff3c9c4aee50e8a0cd8980ba51d9d089d9a6c6d2417332cf5a1e16054514"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.199311 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c22d338e-0daa-47be-a885-15aac0339cf4","Type":"ContainerStarted","Data":"2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.201381 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" event={"ID":"b4f95734-55e8-4d38-8d4a-51f9876fa4e1","Type":"ContainerStarted","Data":"726d5ea7a39d5259a4247b49155bf39fe781a96a4bbeb47eeecca0ccd207c57c"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.205107 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" event={"ID":"16d60e4a-c323-46b9-98ac-7ce9e1949e9f","Type":"ContainerStarted","Data":"5f9957ac9eebbf094c26ba81d5451c39fca4abc0a70dd1ef9258643a0b895e7c"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.219374 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" event={"ID":"f0fd5896-8b33-4fb3-a56c-1e7741ca29df","Type":"ContainerStarted","Data":"6710f839f67613d7b1b65bced809fefe2dd093ab6d55f77ef64ae07899eb2e29"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.219496 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-m8fcn" podStartSLOduration=141.219481056 podStartE2EDuration="2m21.219481056s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.216759338 +0000 UTC m=+164.408068057" watchObservedRunningTime="2025-12-02 14:18:53.219481056 +0000 UTC m=+164.410789765" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.220016 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.222885 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" event={"ID":"9d0021ed-bd4b-49bd-a0a1-b175e794697e","Type":"ContainerStarted","Data":"d9947a78e66db2a6dfbd54532634d92b9a2e57ac7d42289425cebe5285f13db1"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.246962 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.247275 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc 
kubenswrapper[4902]: I1202 14:18:53.247425 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgq8p\" (UniqueName: \"kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.247505 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.248025 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.248715 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.74869081 +0000 UTC m=+164.939999549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.249401 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.249839 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" event={"ID":"8b4b4ce5-869d-4047-b1d5-b4365ded3549","Type":"ContainerStarted","Data":"fca28a8d1c9f6b8f985e18f765eabd44411c073106fbbab66f7adbcb1089121a"} Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.250045 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" podStartSLOduration=141.250027588 podStartE2EDuration="2m21.250027588s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.246316062 +0000 UTC m=+164.437624781" watchObservedRunningTime="2025-12-02 14:18:53.250027588 +0000 UTC m=+164.441336297" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.299390 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9xtpg" 
podStartSLOduration=141.299372306 podStartE2EDuration="2m21.299372306s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.298783339 +0000 UTC m=+164.490092048" watchObservedRunningTime="2025-12-02 14:18:53.299372306 +0000 UTC m=+164.490681015" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.299485 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86crq" podStartSLOduration=142.299481529 podStartE2EDuration="2m22.299481529s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.274481406 +0000 UTC m=+164.465790115" watchObservedRunningTime="2025-12-02 14:18:53.299481529 +0000 UTC m=+164.490790238" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.326078 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgq8p\" (UniqueName: \"kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p\") pod \"redhat-marketplace-4f746\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.348553 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.349221 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qm22x" podStartSLOduration=141.349185367 podStartE2EDuration="2m21.349185367s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.347927751 +0000 UTC m=+164.539236460" watchObservedRunningTime="2025-12-02 14:18:53.349185367 +0000 UTC m=+164.540494076" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.351176 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.851159213 +0000 UTC m=+165.042468002 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.363413 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.385324 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" podStartSLOduration=142.385304518 podStartE2EDuration="2m22.385304518s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.383342082 +0000 UTC m=+164.574650791" watchObservedRunningTime="2025-12-02 14:18:53.385304518 +0000 UTC m=+164.576613227" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.446332 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-mxmxt" podStartSLOduration=141.446313459 podStartE2EDuration="2m21.446313459s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.404519286 +0000 UTC m=+164.595827995" watchObservedRunningTime="2025-12-02 14:18:53.446313459 +0000 UTC m=+164.637622168" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.462457 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qmcz9" podStartSLOduration=141.462435449 podStartE2EDuration="2m21.462435449s" podCreationTimestamp="2025-12-02 14:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.457527519 +0000 UTC m=+164.648836228" watchObservedRunningTime="2025-12-02 14:18:53.462435449 +0000 UTC m=+164.653744148" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.468811 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.469415 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:53.969396437 +0000 UTC m=+165.160705146 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.471061 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.473368 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.480176 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.493232 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jzjr6" podStartSLOduration=142.493215777 podStartE2EDuration="2m22.493215777s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:53.49227911 +0000 UTC m=+164.683587819" watchObservedRunningTime="2025-12-02 14:18:53.493215777 +0000 UTC m=+164.684524486" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.570190 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.570533 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.570633 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkbdz\" (UniqueName: \"kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.570684 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.570958 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.070945115 +0000 UTC m=+165.262253814 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.671934 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.672112 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.672176 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkbdz\" (UniqueName: \"kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.672247 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.673744 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.173721008 +0000 UTC m=+165.365029717 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.689683 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.689740 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.692776 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"] Dec 02 14:18:53 crc kubenswrapper[4902]: W1202 14:18:53.720398 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1467a1b3_3c32_4c86_a5ee_5e0f6f112519.slice/crio-fccb285e25edc36d08bc42e627fb71094bb9f4c45bd0c161244754ae947b1288 WatchSource:0}: Error finding container fccb285e25edc36d08bc42e627fb71094bb9f4c45bd0c161244754ae947b1288: Status 404 returned error can't find the container with id fccb285e25edc36d08bc42e627fb71094bb9f4c45bd0c161244754ae947b1288 Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.735314 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkbdz\" (UniqueName: \"kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz\") pod \"redhat-marketplace-qwhpp\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.773891 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.774289 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.274270087 +0000 UTC m=+165.465578796 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.824350 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 14:18:53 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld
Dec 02 14:18:53 crc kubenswrapper[4902]: [+]process-running ok
Dec 02 14:18:53 crc kubenswrapper[4902]: healthz check failed
Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.824419 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.878937 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.879109 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.379080328 +0000 UTC m=+165.570389037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.879409 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.879767 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.379750257 +0000 UTC m=+165.571058966 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.980593 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.980784 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.480756719 +0000 UTC m=+165.672065428 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.981090 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:53 crc kubenswrapper[4902]: E1202 14:18:53.981514 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.48149808 +0000 UTC m=+165.672806869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:53 crc kubenswrapper[4902]: I1202 14:18:53.989637 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.036029 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"] Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.037554 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.039493 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.046962 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"] Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.081737 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.082092 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.582061659 +0000 UTC m=+165.773370358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.082382 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.082688 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.582681667 +0000 UTC m=+165.773990376 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.183055 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.183243 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.683217106 +0000 UTC m=+165.874525805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.183329 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.183363 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.183450 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ktd7\" (UniqueName: \"kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.183490 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.183899 4902 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.683881795 +0000 UTC m=+165.875190494 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.213489 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.257260 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-f4244" event={"ID":"54c71347-54d5-49cb-b5bb-12b0d607f8ad","Type":"ContainerStarted","Data":"adb9d426f2ec9c627443fde84fb747a5a097033dc9e2b6d806aaddc8e673b98e"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.265144 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" event={"ID":"90af0e3f-43b4-4349-933b-1d9c22b76438","Type":"ContainerStarted","Data":"192e40e037d6b7b4093e0a44e1840c5de7c2c897fdb7d5a017e1d7cf755967a9"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.284441 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.284635 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.784604939 +0000 UTC m=+165.975913658 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.285809 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ktd7\" (UniqueName: \"kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.286244 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.286586 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.786574195 +0000 UTC m=+165.977882904 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.286874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.287345 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.287425 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.287737 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content\") pod \"redhat-operators-t89xz\" (UID: 
\"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.288079 4902 generic.go:334] "Generic (PLEG): container finished" podID="4128c863-697c-459c-954f-f3226bd690ea" containerID="7b2a1de65fadda7cc117c3bf959c2507a77c974e3c6b2537edd26f4f51671d73" exitCode=0 Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.288169 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerDied","Data":"7b2a1de65fadda7cc117c3bf959c2507a77c974e3c6b2537edd26f4f51671d73"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.291139 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.292368 4902 generic.go:334] "Generic (PLEG): container finished" podID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerID="28a76e61b8cd97e5f86591f6fb5db562554c9d1394be8de648acbaf9760bb152" exitCode=0 Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.292449 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerDied","Data":"28a76e61b8cd97e5f86591f6fb5db562554c9d1394be8de648acbaf9760bb152"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.303514 4902 generic.go:334] "Generic (PLEG): container finished" podID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerID="b3b764d182c126246007b2677b38c27d36932dbfa3bde3a68b497a535f6a7d31" exitCode=0 Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.303596 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerDied","Data":"b3b764d182c126246007b2677b38c27d36932dbfa3bde3a68b497a535f6a7d31"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.317194 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ktd7\" (UniqueName: \"kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7\") pod \"redhat-operators-t89xz\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.339778 4902 generic.go:334] "Generic (PLEG): container finished" podID="c22d338e-0daa-47be-a885-15aac0339cf4" containerID="be94f35d02011b672ee85383b04a0c3d5c302d6ba6e8bbe2ea1d3d0f090e41a4" exitCode=0 Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.339961 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c22d338e-0daa-47be-a885-15aac0339cf4","Type":"ContainerDied","Data":"be94f35d02011b672ee85383b04a0c3d5c302d6ba6e8bbe2ea1d3d0f090e41a4"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.346528 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerStarted","Data":"377c353e611731ad8435f2faa8cd17370fb13dcffdeef306e9a775fea6f9098f"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.353017 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.375276 4902 generic.go:334] "Generic (PLEG): container finished" podID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerID="01274fcf0cdf9ca5514b1e7bcfd1511be0c3904e921aa6482f1301da0a6bd104" exitCode=0 Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.375393 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerDied","Data":"01274fcf0cdf9ca5514b1e7bcfd1511be0c3904e921aa6482f1301da0a6bd104"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.386461 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerStarted","Data":"0fbb991ebaf715d2fbc0c9d4c239fc2f4ac92adc3488ec6406687b0c4b3ae331"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.386731 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerStarted","Data":"fccb285e25edc36d08bc42e627fb71094bb9f4c45bd0c161244754ae947b1288"} Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.388371 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.388757 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.88873968 +0000 UTC m=+166.080048389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.474358 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-svzpr" podStartSLOduration=143.474340223 podStartE2EDuration="2m23.474340223s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:54.471901453 +0000 UTC m=+165.663210172" watchObservedRunningTime="2025-12-02 14:18:54.474340223 +0000 UTC m=+165.665648932" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.480064 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9tzqn"] Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.482221 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.492159 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9tzqn"] Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.493512 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.496038 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:54.996025952 +0000 UTC m=+166.187334661 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.598243 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.598519 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.598685 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.098664221 +0000 UTC m=+166.289972940 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.615537 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqsrs\" (UniqueName: \"kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.615722 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.615806 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.616098 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.116083338 +0000 UTC m=+166.307392047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.716786 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.716915 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.216890184 +0000 UTC m=+166.408198893 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.716946 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.716971 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.716986 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqsrs\" (UniqueName: \"kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.717030 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.717066 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.717313 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.217301416 +0000 UTC m=+166.408610125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.719055 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.719395 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.726258 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b63b8b19-f855-4038-891d-6bfd1e5021de-metrics-certs\") pod \"network-metrics-daemon-tlnwn\" (UID: \"b63b8b19-f855-4038-891d-6bfd1e5021de\") " pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.739360 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqsrs\" (UniqueName: \"kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs\") pod \"redhat-operators-9tzqn\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.784630 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.814275 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"]
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.817741 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.817905 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.317849165 +0000 UTC m=+166.509157874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.818240 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.818626 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.318608696 +0000 UTC m=+166.509917405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.821796 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 14:18:54 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld
Dec 02 14:18:54 crc kubenswrapper[4902]: [+]process-running ok
Dec 02 14:18:54 crc kubenswrapper[4902]: healthz check failed
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.821851 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.827546 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9tzqn"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.862099 4902 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.919233 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume\") pod \"c5059510-c9f9-4945-8964-b74b62a6352d\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") "
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.919308 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84tj4\" (UniqueName: \"kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4\") pod \"c5059510-c9f9-4945-8964-b74b62a6352d\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") "
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.919418 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.919492 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume\") pod \"c5059510-c9f9-4945-8964-b74b62a6352d\" (UID: \"c5059510-c9f9-4945-8964-b74b62a6352d\") "
Dec 02 14:18:54 crc kubenswrapper[4902]: E1202 14:18:54.920266 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.420234315 +0000 UTC m=+166.611543024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.920548 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume" (OuterVolumeSpecName: "config-volume") pod "c5059510-c9f9-4945-8964-b74b62a6352d" (UID: "c5059510-c9f9-4945-8964-b74b62a6352d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.927141 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c5059510-c9f9-4945-8964-b74b62a6352d" (UID: "c5059510-c9f9-4945-8964-b74b62a6352d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:18:54 crc kubenswrapper[4902]: I1202 14:18:54.932310 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4" (OuterVolumeSpecName: "kube-api-access-84tj4") pod "c5059510-c9f9-4945-8964-b74b62a6352d" (UID: "c5059510-c9f9-4945-8964-b74b62a6352d"). InnerVolumeSpecName "kube-api-access-84tj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.023802 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tlnwn"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.029028 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.029092 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84tj4\" (UniqueName: \"kubernetes.io/projected/c5059510-c9f9-4945-8964-b74b62a6352d-kube-api-access-84tj4\") on node \"crc\" DevicePath \"\""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.029102 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5059510-c9f9-4945-8964-b74b62a6352d-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.029111 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5059510-c9f9-4945-8964-b74b62a6352d-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.029359 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.529348449 +0000 UTC m=+166.720657158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.137054 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.137544 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.637523856 +0000 UTC m=+166.828832575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.143630 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9tzqn"]
Dec 02 14:18:55 crc kubenswrapper[4902]: W1202 14:18:55.167131 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa4e35f6_253d_41cd_a8b1_bd4d30cbb1b2.slice/crio-2576f93c92aa2c4831de88bf5955c7f8352049152170dc4c905d05e7ab73c0bb WatchSource:0}: Error finding container 2576f93c92aa2c4831de88bf5955c7f8352049152170dc4c905d05e7ab73c0bb: Status 404 returned error can't find the container with id 2576f93c92aa2c4831de88bf5955c7f8352049152170dc4c905d05e7ab73c0bb
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.238220 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.238578 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.738543358 +0000 UTC m=+166.929852067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.293542 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tlnwn"]
Dec 02 14:18:55 crc kubenswrapper[4902]: W1202 14:18:55.300025 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb63b8b19_f855_4038_891d_6bfd1e5021de.slice/crio-80f5f9aaa4f9c4bff12c200d2a8c2669743e3e0d2ad0611beaec7ff752114d78 WatchSource:0}: Error finding container 80f5f9aaa4f9c4bff12c200d2a8c2669743e3e0d2ad0611beaec7ff752114d78: Status 404 returned error can't find the container with id 80f5f9aaa4f9c4bff12c200d2a8c2669743e3e0d2ad0611beaec7ff752114d78
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.341021 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.341365 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.841344552 +0000 UTC m=+167.032653271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.366093 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.397555 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx" event={"ID":"c5059510-c9f9-4945-8964-b74b62a6352d","Type":"ContainerDied","Data":"2cc0a59f00aa7a2e52c8dbbdbc31f86db530467eee7a752296bd1402be8a662e"}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.397621 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cc0a59f00aa7a2e52c8dbbdbc31f86db530467eee7a752296bd1402be8a662e"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.397653 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.399607 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerStarted","Data":"2576f93c92aa2c4831de88bf5955c7f8352049152170dc4c905d05e7ab73c0bb"}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.401777 4902 generic.go:334] "Generic (PLEG): container finished" podID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerID="0fbb991ebaf715d2fbc0c9d4c239fc2f4ac92adc3488ec6406687b0c4b3ae331" exitCode=0
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.401818 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerDied","Data":"0fbb991ebaf715d2fbc0c9d4c239fc2f4ac92adc3488ec6406687b0c4b3ae331"}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.402904 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerStarted","Data":"a0cc29a23d2d1d9692481fd7f74a9f250548900fc75913c95c5c21bcf53058eb"}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.404249 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" event={"ID":"b63b8b19-f855-4038-891d-6bfd1e5021de","Type":"ContainerStarted","Data":"80f5f9aaa4f9c4bff12c200d2a8c2669743e3e0d2ad0611beaec7ff752114d78"}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.444887 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.446099 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:55.946085641 +0000 UTC m=+167.137394350 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.546062 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.546281 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 14:18:56.046233308 +0000 UTC m=+167.237542017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.546421 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:55 crc kubenswrapper[4902]: E1202 14:18:55.546790 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 14:18:56.046779454 +0000 UTC m=+167.238088153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2sdv9" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.556651 4902 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T14:18:54.862137869Z","Handler":null,"Name":""}
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.564376 4902 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.564415 4902 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.647172 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.650778 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.652075 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.677660 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" podStartSLOduration=144.677636988 podStartE2EDuration="2m24.677636988s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:18:55.44500023 +0000 UTC m=+166.636308939" watchObservedRunningTime="2025-12-02 14:18:55.677636988 +0000 UTC m=+166.868945697"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.748845 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir\") pod \"c22d338e-0daa-47be-a885-15aac0339cf4\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.748952 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access\") pod \"c22d338e-0daa-47be-a885-15aac0339cf4\" (UID: \"c22d338e-0daa-47be-a885-15aac0339cf4\") "
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.748982 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c22d338e-0daa-47be-a885-15aac0339cf4" (UID: "c22d338e-0daa-47be-a885-15aac0339cf4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.749174 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9"
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.749231 4902 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c22d338e-0daa-47be-a885-15aac0339cf4-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.754335 4902 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.754372 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.755258 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c22d338e-0daa-47be-a885-15aac0339cf4" (UID: "c22d338e-0daa-47be-a885-15aac0339cf4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.819499 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:55 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:55 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:55 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.819650 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:55 crc kubenswrapper[4902]: I1202 14:18:55.850408 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c22d338e-0daa-47be-a885-15aac0339cf4-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.017025 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.017093 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.025224 4902 patch_prober.go:28] interesting pod/console-f9d7485db-8bcjv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.34:8443/health\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.025303 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8bcjv" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.34:8443/health\": dial tcp 10.217.0.34:8443: connect: connection refused" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.105041 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2sdv9\" 
(UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.110821 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.115668 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.197624 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.197699 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.220947 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 14:18:56 crc kubenswrapper[4902]: E1202 14:18:56.221178 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5059510-c9f9-4945-8964-b74b62a6352d" containerName="collect-profiles" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.221196 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5059510-c9f9-4945-8964-b74b62a6352d" containerName="collect-profiles" Dec 02 14:18:56 crc kubenswrapper[4902]: E1202 14:18:56.221220 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22d338e-0daa-47be-a885-15aac0339cf4" containerName="pruner" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.221228 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22d338e-0daa-47be-a885-15aac0339cf4" containerName="pruner" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.221349 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5059510-c9f9-4945-8964-b74b62a6352d" containerName="collect-profiles" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.221361 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c22d338e-0daa-47be-a885-15aac0339cf4" containerName="pruner" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.221730 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.228924 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.229194 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.235861 4902 patch_prober.go:28] interesting pod/apiserver-76f77b778f-vm2hb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]log ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]etcd ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/generic-apiserver-start-informers ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/max-in-flight-filter ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 02 14:18:56 crc kubenswrapper[4902]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 02 14:18:56 crc kubenswrapper[4902]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/project.openshift.io-projectcache ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/openshift.io-startinformers ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 02 14:18:56 crc kubenswrapper[4902]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 02 14:18:56 crc kubenswrapper[4902]: livez check failed Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.235932 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" podUID="90af0e3f-43b4-4349-933b-1d9c22b76438" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.239490 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.277499 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.277586 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.378408 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.378833 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.378943 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.379058 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.400120 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.413632 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerStarted","Data":"8660c7c3c261e6cc3a08edebb420b30cc0b126459fd0d3df509b2d5a8c6e96e5"} Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.418885 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c22d338e-0daa-47be-a885-15aac0339cf4","Type":"ContainerDied","Data":"2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822"} Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.418924 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bb1a86ec0a7f6fa863ea34d205bb10350e75ff7e31f93be3f24c99b47eeb822" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.419035 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.426683 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-f4244" event={"ID":"54c71347-54d5-49cb-b5bb-12b0d607f8ad","Type":"ContainerStarted","Data":"fcdd0099271ad752c635251a3087ae52c15051dbbbe34505ecac1cc1665589ac"} Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.554509 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.575138 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.741925 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.785985 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 14:18:56 crc kubenswrapper[4902]: W1202 14:18:56.792229 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda3a23ff1_98ea_4e87_ae57_a296afe170e3.slice/crio-2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8 WatchSource:0}: Error finding container 2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8: Status 404 returned error can't find the container with id 2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8 Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.825215 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:56 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:56 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:56 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.825696 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.849218 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.849285 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:56 crc kubenswrapper[4902]: I1202 14:18:56.856921 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.117462 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.433756 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerStarted","Data":"3784fa7e88a204296515823395e6f7335a33353b3619c0e6af55527f65f1c6f0"} Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.435029 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" event={"ID":"5af6da25-fad3-4ac3-bb6b-f8b2169f8992","Type":"ContainerStarted","Data":"938a0a2087b3aebd4e72a40d970c486ed388d06b3578532b0392ddda60d0c3ed"} Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.437012 4902 generic.go:334] 
"Generic (PLEG): container finished" podID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerID="8660c7c3c261e6cc3a08edebb420b30cc0b126459fd0d3df509b2d5a8c6e96e5" exitCode=0 Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.437065 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerDied","Data":"8660c7c3c261e6cc3a08edebb420b30cc0b126459fd0d3df509b2d5a8c6e96e5"} Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.438371 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a3a23ff1-98ea-4e87-ae57-a296afe170e3","Type":"ContainerStarted","Data":"2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8"} Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.447627 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vf7qq" Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.820848 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:57 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:57 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:57 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:57 crc kubenswrapper[4902]: I1202 14:18:57.820945 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.446876 4902 generic.go:334] "Generic (PLEG): container finished" podID="caf53037-752b-4821-8e41-81976a0a0554" containerID="3784fa7e88a204296515823395e6f7335a33353b3619c0e6af55527f65f1c6f0" exitCode=0 Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.446972 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerDied","Data":"3784fa7e88a204296515823395e6f7335a33353b3619c0e6af55527f65f1c6f0"} Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.449823 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" event={"ID":"b63b8b19-f855-4038-891d-6bfd1e5021de","Type":"ContainerStarted","Data":"3b5759eb2ca346c60080678d336207811e373ccbff782e3b9b71808cc07bdf9c"} Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.451626 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerStarted","Data":"add58b79d77c262f4e34f258003ac5593059c0afef141ab49281617205bbaf01"} Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.820418 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:58 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:58 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:58 crc kubenswrapper[4902]: 
healthz check failed Dec 02 14:18:58 crc kubenswrapper[4902]: I1202 14:18:58.820480 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:18:59 crc kubenswrapper[4902]: I1202 14:18:59.670782 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-flxzr" Dec 02 14:18:59 crc kubenswrapper[4902]: I1202 14:18:59.820304 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:18:59 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:18:59 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:18:59 crc kubenswrapper[4902]: healthz check failed Dec 02 14:18:59 crc kubenswrapper[4902]: I1202 14:18:59.820751 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:19:00 crc kubenswrapper[4902]: I1202 14:19:00.532438 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:19:00 crc kubenswrapper[4902]: I1202 14:19:00.538218 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:19:00 crc kubenswrapper[4902]: I1202 14:19:00.819633 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:19:00 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:19:00 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:19:00 crc kubenswrapper[4902]: healthz check failed Dec 02 14:19:00 crc kubenswrapper[4902]: I1202 14:19:00.819702 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.186715 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.191167 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-vm2hb" Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.480634 4902 generic.go:334] "Generic (PLEG): container finished" podID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerID="add58b79d77c262f4e34f258003ac5593059c0afef141ab49281617205bbaf01" exitCode=0 Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.480699 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerDied","Data":"add58b79d77c262f4e34f258003ac5593059c0afef141ab49281617205bbaf01"} Dec 02 14:19:01 crc 
kubenswrapper[4902]: I1202 14:19:01.732023 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-xxldq" Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.818964 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:19:01 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:19:01 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:19:01 crc kubenswrapper[4902]: healthz check failed Dec 02 14:19:01 crc kubenswrapper[4902]: I1202 14:19:01.819020 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:19:02 crc kubenswrapper[4902]: I1202 14:19:02.488710 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" event={"ID":"5af6da25-fad3-4ac3-bb6b-f8b2169f8992","Type":"ContainerStarted","Data":"fbaf5a37fa423b9cb0e1593e19d6c30219196e56f6536454aaa5ad1c897003b8"} Dec 02 14:19:02 crc kubenswrapper[4902]: I1202 14:19:02.817976 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 14:19:02 crc kubenswrapper[4902]: [-]has-synced failed: reason withheld Dec 02 14:19:02 crc kubenswrapper[4902]: [+]process-running ok Dec 02 14:19:02 crc kubenswrapper[4902]: healthz check failed Dec 02 14:19:02 crc kubenswrapper[4902]: I1202 14:19:02.818057 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 14:19:03 crc kubenswrapper[4902]: I1202 14:19:03.495339 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a3a23ff1-98ea-4e87-ae57-a296afe170e3","Type":"ContainerStarted","Data":"6f76e83594c3fea07d4ee03874fdb34e4d91042dbe4f2c8aead0b284aab9cc8a"} Dec 02 14:19:03 crc kubenswrapper[4902]: I1202 14:19:03.818364 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:19:03 crc kubenswrapper[4902]: I1202 14:19:03.822270 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-lgl7q" Dec 02 14:19:04 crc kubenswrapper[4902]: I1202 14:19:04.731472 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:19:04 crc kubenswrapper[4902]: I1202 14:19:04.731533 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:19:05 crc kubenswrapper[4902]: I1202 14:19:05.507507 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-f4244" event={"ID":"54c71347-54d5-49cb-b5bb-12b0d607f8ad","Type":"ContainerStarted","Data":"456a0115d33f9a53a8a623e88737e2d23c29b0fdfd228e88325ed641fa3b2e60"} Dec 02 14:19:05 crc kubenswrapper[4902]: I1202 14:19:05.508166 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:19:05 crc kubenswrapper[4902]: I1202 14:19:05.525057 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=9.525041927 podStartE2EDuration="9.525041927s" podCreationTimestamp="2025-12-02 14:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:19:05.524345968 +0000 UTC m=+176.715654707" watchObservedRunningTime="2025-12-02 14:19:05.525041927 +0000 UTC m=+176.716350626" Dec 02 14:19:05 crc kubenswrapper[4902]: I1202 14:19:05.548142 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" podStartSLOduration=154.548121926 podStartE2EDuration="2m34.548121926s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:19:05.543928026 +0000 UTC m=+176.735236745" watchObservedRunningTime="2025-12-02 14:19:05.548121926 +0000 UTC m=+176.739430645" Dec 02 14:19:06 crc kubenswrapper[4902]: I1202 14:19:06.017110 4902 patch_prober.go:28] interesting pod/console-f9d7485db-8bcjv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.34:8443/health\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Dec 02 14:19:06 crc kubenswrapper[4902]: I1202 14:19:06.017476 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8bcjv" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.34:8443/health\": dial tcp 10.217.0.34:8443: connect: connection refused" Dec 02 14:19:06 crc kubenswrapper[4902]: I1202 14:19:06.516279 4902 generic.go:334] "Generic (PLEG): container finished" podID="a3a23ff1-98ea-4e87-ae57-a296afe170e3" containerID="6f76e83594c3fea07d4ee03874fdb34e4d91042dbe4f2c8aead0b284aab9cc8a" exitCode=0 Dec 02 14:19:06 crc kubenswrapper[4902]: I1202 14:19:06.516339 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a3a23ff1-98ea-4e87-ae57-a296afe170e3","Type":"ContainerDied","Data":"6f76e83594c3fea07d4ee03874fdb34e4d91042dbe4f2c8aead0b284aab9cc8a"} Dec 02 14:19:06 crc kubenswrapper[4902]: I1202 14:19:06.541610 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-f4244" podStartSLOduration=33.541588783 podStartE2EDuration="33.541588783s" podCreationTimestamp="2025-12-02 14:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:19:06.535358445 +0000 UTC m=+177.726667154" 
watchObservedRunningTime="2025-12-02 14:19:06.541588783 +0000 UTC m=+177.732897482" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.393918 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.479356 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.571094 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a3a23ff1-98ea-4e87-ae57-a296afe170e3","Type":"ContainerDied","Data":"2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8"} Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.571556 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dbab9a6ad84ffa78b437333eaa218c9176d750a2bbc7c952692bcbe688660e8" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.571252 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.665127 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access\") pod \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.665240 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir\") pod \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\" (UID: \"a3a23ff1-98ea-4e87-ae57-a296afe170e3\") " Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.665399 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a3a23ff1-98ea-4e87-ae57-a296afe170e3" (UID: "a3a23ff1-98ea-4e87-ae57-a296afe170e3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.665817 4902 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.670299 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a3a23ff1-98ea-4e87-ae57-a296afe170e3" (UID: "a3a23ff1-98ea-4e87-ae57-a296afe170e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:19:15 crc kubenswrapper[4902]: I1202 14:19:15.767523 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a23ff1-98ea-4e87-ae57-a296afe170e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:19:16 crc kubenswrapper[4902]: I1202 14:19:16.022225 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:19:16 crc kubenswrapper[4902]: I1202 14:19:16.028219 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8bcjv" Dec 02 14:19:16 crc kubenswrapper[4902]: I1202 14:19:16.492230 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:19:28 crc kubenswrapper[4902]: E1202 14:19:28.253207 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 14:19:28 crc kubenswrapper[4902]: E1202 14:19:28.253799 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sgq8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4f746_openshift-marketplace(1467a1b3-3c32-4c86-a5ee-5e0f6f112519): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:28 crc kubenswrapper[4902]: E1202 14:19:28.254946 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4f746" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" Dec 02 14:19:31 crc 
Dec 02 14:19:31 crc kubenswrapper[4902]: I1202 14:19:31.893399 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-fjmsz"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.816227 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 02 14:19:32 crc kubenswrapper[4902]: E1202 14:19:32.816631 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3a23ff1-98ea-4e87-ae57-a296afe170e3" containerName="pruner"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.816648 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3a23ff1-98ea-4e87-ae57-a296afe170e3" containerName="pruner"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.816784 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3a23ff1-98ea-4e87-ae57-a296afe170e3" containerName="pruner"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.817317 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.819137 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.819269 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.826424 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.862537 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.862605 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.963193 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.963264 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.963418 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:32 crc kubenswrapper[4902]: I1202 14:19:32.990992 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:33 crc kubenswrapper[4902]: I1202 14:19:33.158874 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 14:19:34 crc kubenswrapper[4902]: I1202 14:19:34.731305 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:19:34 crc kubenswrapper[4902]: I1202 14:19:34.731771 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.022261 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.023476 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.028807 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.127404 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.127746 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.127983 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.234709 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.234619 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.235651 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.235730 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.235883 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.256940 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access\") pod \"installer-9-crc\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 02 14:19:38 crc kubenswrapper[4902]: I1202 14:19:38.348441 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
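The reconciler_common.go lines above show the kubelet's volume manager converging actual state on desired state: VerifyControllerAttachedVolume, then MountVolume.SetUp when a pod is admitted, mirrored later by UnmountVolume.TearDown and "Volume detached" when the pod goes away. A simplified sketch of one such reconcile pass; the types and names (volume, reconcile) are invented for illustration, not the kubelet's:

package main

import "fmt"

type volume struct{ name, plugin string }

// reconcile makes the actual set of mounted volumes match the desired set,
// mounting missing volumes and unmounting stale ones, in the same spirit as
// the MountVolume/UnmountVolume log lines above.
func reconcile(desired, actual map[string]volume) {
	for name, v := range desired {
		if _, ok := actual[name]; !ok {
			fmt.Printf("MountVolume started for volume %q (plugin %s)\n", name, v.plugin)
			actual[name] = v
		}
	}
	for name := range actual {
		if _, ok := desired[name]; !ok {
			fmt.Printf("UnmountVolume started for volume %q\n", name)
			delete(actual, name)
		}
	}
}

func main() {
	desired := map[string]volume{
		"kubelet-dir":     {"kubelet-dir", "kubernetes.io/host-path"},
		"kube-api-access": {"kube-api-access", "kubernetes.io/projected"},
	}
	reconcile(desired, map[string]volume{}) // pod admitted: both volumes get mounted
	reconcile(map[string]volume{}, desired) // pod deleted: both volumes get torn down
}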
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 14:19:52 crc kubenswrapper[4902]: E1202 14:19:52.412127 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 14:19:52 crc kubenswrapper[4902]: E1202 14:19:52.412962 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kqjlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6shgl_openshift-marketplace(4128c863-697c-459c-954f-f3226bd690ea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:52 crc kubenswrapper[4902]: E1202 14:19:52.414416 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6shgl" podUID="4128c863-697c-459c-954f-f3226bd690ea" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.634774 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6shgl" podUID="4128c863-697c-459c-954f-f3226bd690ea" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.819682 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.819944 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkbdz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qwhpp_openshift-marketplace(f9a5fd22-f2a6-450e-a1f5-d5787f6368a7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.823068 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qwhpp" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.913433 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.913822 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2gxcd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-77jdb_openshift-marketplace(3a1c73ba-580d-4532-b7d7-8e263943bb06): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:57 crc kubenswrapper[4902]: E1202 14:19:57.915032 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-77jdb" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" Dec 02 14:19:58 crc kubenswrapper[4902]: I1202 14:19:58.041862 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 14:19:58 crc kubenswrapper[4902]: I1202 14:19:58.277790 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.334320 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.335268 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f479k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4wsv4_openshift-marketplace(6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.336527 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4wsv4" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.351317 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.351498 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-crjqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-cg77b_openshift-marketplace(ab032431-54e6-42b3-b682-8c18f5f29f81): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:19:58 crc kubenswrapper[4902]: E1202 14:19:58.352966 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-cg77b" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" Dec 02 14:19:58 crc kubenswrapper[4902]: I1202 14:19:58.848790 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tlnwn" event={"ID":"b63b8b19-f855-4038-891d-6bfd1e5021de","Type":"ContainerStarted","Data":"6e1ebf026b4573cbce549637eb5d65304340bd3b7148d98dfc9411eda4468bb2"} Dec 02 14:19:58 crc kubenswrapper[4902]: I1202 14:19:58.849909 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"33461604-9b05-4934-90f2-1642fdd1c40a","Type":"ContainerStarted","Data":"5d66aeb49f95b07c0b02eba92075b1b1786a6e0a2f686efe8e80c5bfb45c9057"} Dec 02 14:19:58 crc kubenswrapper[4902]: I1202 14:19:58.887148 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-tlnwn" podStartSLOduration=207.887128123 podStartE2EDuration="3m27.887128123s" podCreationTimestamp="2025-12-02 14:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:19:58.865239836 +0000 UTC m=+230.056548555" watchObservedRunningTime="2025-12-02 14:19:58.887128123 +0000 UTC m=+230.078436852" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.165581 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-77jdb" 
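After each ErrImagePull above, the kubelet does not retry immediately: the pod worker re-queues the pull with a doubling delay, which is what the later ImagePullBackOff ("Back-off pulling image ...") entries reflect. A sketch of that schedule, assuming the stock pattern of a ~10s initial delay doubling up to a ~5m cap (values here are illustrative, not read from this node's config):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Doubling back-off with a ceiling, as behind ImagePullBackOff.
	delay, maxDelay := 10*time.Second, 5*time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d failed: ErrImagePull; next retry in %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // further failures keep retrying at the cap
		}
	}
}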
podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.165663 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4wsv4" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.165824 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-cg77b" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" Dec 02 14:20:01 crc kubenswrapper[4902]: W1202 14:20:01.169451 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podda07947e_84ca_4cc6_9e55_f6b99bfca9c6.slice/crio-b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45 WatchSource:0}: Error finding container b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45: Status 404 returned error can't find the container with id b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45 Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.206201 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.206368 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7ktd7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-t89xz_openshift-marketplace(caf53037-752b-4821-8e41-81976a0a0554): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 
14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.207764 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-t89xz" podUID="caf53037-752b-4821-8e41-81976a0a0554" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.221199 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.221396 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nqsrs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9tzqn_openshift-marketplace(fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:20:01 crc kubenswrapper[4902]: E1202 14:20:01.222629 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-9tzqn" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.864775 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"da07947e-84ca-4cc6-9e55-f6b99bfca9c6","Type":"ContainerStarted","Data":"f45191502aaecb56abbb8eba3fd79bfda3cdf60ba7b35eaab46fb8219b519b5f"} Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.865066 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"da07947e-84ca-4cc6-9e55-f6b99bfca9c6","Type":"ContainerStarted","Data":"b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45"} Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.867182 4902 generic.go:334] "Generic (PLEG): container finished" podID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerID="68ae2ce6672dc19411cfa45472c584d3151f10e4de9edd9a9a9c0c56024b6fc3" exitCode=0 Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.867328 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerDied","Data":"68ae2ce6672dc19411cfa45472c584d3151f10e4de9edd9a9a9c0c56024b6fc3"} Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.868730 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"33461604-9b05-4934-90f2-1642fdd1c40a","Type":"ContainerStarted","Data":"9846b686acb01071f330f33d5872128b19492869e7fc918c05a323c7acab627e"} Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.910546 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=23.910529838 podStartE2EDuration="23.910529838s" podCreationTimestamp="2025-12-02 14:19:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:20:01.908744097 +0000 UTC m=+233.100052816" watchObservedRunningTime="2025-12-02 14:20:01.910529838 +0000 UTC m=+233.101838547" Dec 02 14:20:01 crc kubenswrapper[4902]: I1202 14:20:01.911161 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=29.911153836 podStartE2EDuration="29.911153836s" podCreationTimestamp="2025-12-02 14:19:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:20:01.895027684 +0000 UTC m=+233.086336443" watchObservedRunningTime="2025-12-02 14:20:01.911153836 +0000 UTC m=+233.102462555" Dec 02 14:20:02 crc kubenswrapper[4902]: I1202 14:20:02.878955 4902 generic.go:334] "Generic (PLEG): container finished" podID="da07947e-84ca-4cc6-9e55-f6b99bfca9c6" containerID="f45191502aaecb56abbb8eba3fd79bfda3cdf60ba7b35eaab46fb8219b519b5f" exitCode=0 Dec 02 14:20:02 crc kubenswrapper[4902]: I1202 14:20:02.879038 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"da07947e-84ca-4cc6-9e55-f6b99bfca9c6","Type":"ContainerDied","Data":"f45191502aaecb56abbb8eba3fd79bfda3cdf60ba7b35eaab46fb8219b519b5f"} Dec 02 14:20:03 crc kubenswrapper[4902]: I1202 14:20:03.885376 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerStarted","Data":"bb7441fdff7728b078fb0a70f053a16da19513cb09c442a161767f8ea27cff7f"} Dec 02 14:20:03 crc kubenswrapper[4902]: I1202 14:20:03.922188 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4f746" podStartSLOduration=2.444539456 podStartE2EDuration="1m10.922166467s" podCreationTimestamp="2025-12-02 14:18:53 +0000 UTC" firstStartedPulling="2025-12-02 14:18:54.388655568 +0000 UTC m=+165.579964277" lastFinishedPulling="2025-12-02 14:20:02.866282539 +0000 UTC 
m=+234.057591288" observedRunningTime="2025-12-02 14:20:03.911506892 +0000 UTC m=+235.102815611" watchObservedRunningTime="2025-12-02 14:20:03.922166467 +0000 UTC m=+235.113475176" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.126443 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.174943 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access\") pod \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.175079 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir\") pod \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\" (UID: \"da07947e-84ca-4cc6-9e55-f6b99bfca9c6\") " Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.175195 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "da07947e-84ca-4cc6-9e55-f6b99bfca9c6" (UID: "da07947e-84ca-4cc6-9e55-f6b99bfca9c6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.175515 4902 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.179801 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "da07947e-84ca-4cc6-9e55-f6b99bfca9c6" (UID: "da07947e-84ca-4cc6-9e55-f6b99bfca9c6"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.276877 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da07947e-84ca-4cc6-9e55-f6b99bfca9c6-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.731966 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.732040 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.732105 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.732837 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.732959 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d" gracePeriod=600 Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.892181 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"da07947e-84ca-4cc6-9e55-f6b99bfca9c6","Type":"ContainerDied","Data":"b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45"} Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.892231 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1757342b8914ea371233797546550f6656697f544d3f2b79e449c4affbcfe45" Dec 02 14:20:04 crc kubenswrapper[4902]: I1202 14:20:04.892235 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 14:20:05 crc kubenswrapper[4902]: I1202 14:20:05.899453 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d" exitCode=0 Dec 02 14:20:05 crc kubenswrapper[4902]: I1202 14:20:05.899578 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d"} Dec 02 14:20:05 crc kubenswrapper[4902]: I1202 14:20:05.899786 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2"} Dec 02 14:20:10 crc kubenswrapper[4902]: I1202 14:20:10.926211 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerStarted","Data":"f8a08dfba29cd770d17b2de54d845053a49d6b2823c5d2c2d6462efb5d1d45af"} Dec 02 14:20:11 crc kubenswrapper[4902]: I1202 14:20:11.932252 4902 generic.go:334] "Generic (PLEG): container finished" podID="4128c863-697c-459c-954f-f3226bd690ea" containerID="f8a08dfba29cd770d17b2de54d845053a49d6b2823c5d2c2d6462efb5d1d45af" exitCode=0 Dec 02 14:20:11 crc kubenswrapper[4902]: I1202 14:20:11.932370 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerDied","Data":"f8a08dfba29cd770d17b2de54d845053a49d6b2823c5d2c2d6462efb5d1d45af"} Dec 02 14:20:12 crc kubenswrapper[4902]: I1202 14:20:12.940246 4902 generic.go:334] "Generic (PLEG): container finished" podID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerID="58635d73c77ce279ceb691ea4df7c1c7bbf577cdb203c42ab02d6e31609ec314" exitCode=0 Dec 02 14:20:12 crc kubenswrapper[4902]: I1202 14:20:12.940317 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerDied","Data":"58635d73c77ce279ceb691ea4df7c1c7bbf577cdb203c42ab02d6e31609ec314"} Dec 02 14:20:13 crc kubenswrapper[4902]: I1202 14:20:13.364058 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:20:13 crc kubenswrapper[4902]: I1202 14:20:13.364111 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:20:13 crc kubenswrapper[4902]: I1202 14:20:13.425557 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:20:13 crc kubenswrapper[4902]: I1202 14:20:13.987394 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.955656 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" 
event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerStarted","Data":"b3ff5ffd1daba04c4ae0d603f165efb04e8909b196ff2c4638e3708392ed8935"} Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.959652 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerStarted","Data":"ba88f6884e6a9c252e2c7e293bbe6083b53b124e0939de29905d28b506531ab6"} Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.967285 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerStarted","Data":"b6a6ebded4979b7fa01c80ad15a81ac98961f83406a0b5ddd454d23089a6e46f"} Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.969040 4902 generic.go:334] "Generic (PLEG): container finished" podID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerID="ddb04958cd0ccc60a34ec5a454ff24fa0a64c0e44a3b7ee397cc6db2fb312a77" exitCode=0 Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.969122 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerDied","Data":"ddb04958cd0ccc60a34ec5a454ff24fa0a64c0e44a3b7ee397cc6db2fb312a77"} Dec 02 14:20:14 crc kubenswrapper[4902]: I1202 14:20:14.995365 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6shgl" podStartSLOduration=5.128300304 podStartE2EDuration="1m23.995346694s" podCreationTimestamp="2025-12-02 14:18:51 +0000 UTC" firstStartedPulling="2025-12-02 14:18:54.290752594 +0000 UTC m=+165.482061313" lastFinishedPulling="2025-12-02 14:20:13.157798994 +0000 UTC m=+244.349107703" observedRunningTime="2025-12-02 14:20:14.992015398 +0000 UTC m=+246.183324107" watchObservedRunningTime="2025-12-02 14:20:14.995346694 +0000 UTC m=+246.186655403" Dec 02 14:20:15 crc kubenswrapper[4902]: I1202 14:20:15.009265 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qwhpp" podStartSLOduration=7.294032566 podStartE2EDuration="1m22.009245042s" podCreationTimestamp="2025-12-02 14:18:53 +0000 UTC" firstStartedPulling="2025-12-02 14:18:59.460750626 +0000 UTC m=+170.652059345" lastFinishedPulling="2025-12-02 14:20:14.175963112 +0000 UTC m=+245.367271821" observedRunningTime="2025-12-02 14:20:15.009220422 +0000 UTC m=+246.200529131" watchObservedRunningTime="2025-12-02 14:20:15.009245042 +0000 UTC m=+246.200553751" Dec 02 14:20:16 crc kubenswrapper[4902]: I1202 14:20:16.061606 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerStarted","Data":"1f0ec12e4c1fb79d28cad9f4a04ef5dcb095e2ba406c7d5e39e9333ee16f9314"} Dec 02 14:20:16 crc kubenswrapper[4902]: I1202 14:20:16.064291 4902 generic.go:334] "Generic (PLEG): container finished" podID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerID="b3ff5ffd1daba04c4ae0d603f165efb04e8909b196ff2c4638e3708392ed8935" exitCode=0 Dec 02 14:20:16 crc kubenswrapper[4902]: I1202 14:20:16.064323 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerDied","Data":"b3ff5ffd1daba04c4ae0d603f165efb04e8909b196ff2c4638e3708392ed8935"} Dec 02 
14:20:17 crc kubenswrapper[4902]: I1202 14:20:17.087759 4902 generic.go:334] "Generic (PLEG): container finished" podID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerID="1f0ec12e4c1fb79d28cad9f4a04ef5dcb095e2ba406c7d5e39e9333ee16f9314" exitCode=0 Dec 02 14:20:17 crc kubenswrapper[4902]: I1202 14:20:17.087811 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerDied","Data":"1f0ec12e4c1fb79d28cad9f4a04ef5dcb095e2ba406c7d5e39e9333ee16f9314"} Dec 02 14:20:17 crc kubenswrapper[4902]: I1202 14:20:17.091414 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerStarted","Data":"1d12d3e933230fd6e420928974c58408fa4af15c63d334878f6df8c53ba00d4c"} Dec 02 14:20:17 crc kubenswrapper[4902]: I1202 14:20:17.094231 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerStarted","Data":"f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7"} Dec 02 14:20:18 crc kubenswrapper[4902]: I1202 14:20:18.101169 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerStarted","Data":"4ccd02d97c770b3dbf55aeccf9b85baaebbb6294cd2686ea11f3ab621d6f3c1b"} Dec 02 14:20:18 crc kubenswrapper[4902]: I1202 14:20:18.103144 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerStarted","Data":"1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064"} Dec 02 14:20:18 crc kubenswrapper[4902]: I1202 14:20:18.120530 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4wsv4" podStartSLOduration=6.096179402 podStartE2EDuration="1m28.120518275s" podCreationTimestamp="2025-12-02 14:18:50 +0000 UTC" firstStartedPulling="2025-12-02 14:18:54.385092796 +0000 UTC m=+165.576401505" lastFinishedPulling="2025-12-02 14:20:16.409431679 +0000 UTC m=+247.600740378" observedRunningTime="2025-12-02 14:20:17.177736438 +0000 UTC m=+248.369045187" watchObservedRunningTime="2025-12-02 14:20:18.120518275 +0000 UTC m=+249.311827004" Dec 02 14:20:18 crc kubenswrapper[4902]: I1202 14:20:18.137384 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9tzqn" podStartSLOduration=22.057541636 podStartE2EDuration="1m24.137362298s" podCreationTimestamp="2025-12-02 14:18:54 +0000 UTC" firstStartedPulling="2025-12-02 14:19:15.398983873 +0000 UTC m=+186.590292592" lastFinishedPulling="2025-12-02 14:20:17.478804545 +0000 UTC m=+248.670113254" observedRunningTime="2025-12-02 14:20:18.133377274 +0000 UTC m=+249.324686003" watchObservedRunningTime="2025-12-02 14:20:18.137362298 +0000 UTC m=+249.328671007" Dec 02 14:20:19 crc kubenswrapper[4902]: I1202 14:20:19.114295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerStarted","Data":"2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084"} Dec 02 14:20:19 crc kubenswrapper[4902]: I1202 14:20:19.115978 4902 generic.go:334] "Generic (PLEG): 
container finished" podID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerID="4ccd02d97c770b3dbf55aeccf9b85baaebbb6294cd2686ea11f3ab621d6f3c1b" exitCode=0 Dec 02 14:20:19 crc kubenswrapper[4902]: I1202 14:20:19.116016 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerDied","Data":"4ccd02d97c770b3dbf55aeccf9b85baaebbb6294cd2686ea11f3ab621d6f3c1b"} Dec 02 14:20:19 crc kubenswrapper[4902]: I1202 14:20:19.122706 4902 generic.go:334] "Generic (PLEG): container finished" podID="caf53037-752b-4821-8e41-81976a0a0554" containerID="1d12d3e933230fd6e420928974c58408fa4af15c63d334878f6df8c53ba00d4c" exitCode=0 Dec 02 14:20:19 crc kubenswrapper[4902]: I1202 14:20:19.122746 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerDied","Data":"1d12d3e933230fd6e420928974c58408fa4af15c63d334878f6df8c53ba00d4c"} Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.158804 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-77jdb" podStartSLOduration=6.496970078 podStartE2EDuration="1m30.158785646s" podCreationTimestamp="2025-12-02 14:18:51 +0000 UTC" firstStartedPulling="2025-12-02 14:18:54.29479034 +0000 UTC m=+165.486099059" lastFinishedPulling="2025-12-02 14:20:17.956605918 +0000 UTC m=+249.147914627" observedRunningTime="2025-12-02 14:20:21.15543835 +0000 UTC m=+252.346747059" watchObservedRunningTime="2025-12-02 14:20:21.158785646 +0000 UTC m=+252.350094355" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.329226 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.329605 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.414758 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.653889 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.653933 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.691552 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.868272 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.868326 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:21 crc kubenswrapper[4902]: I1202 14:20:21.913252 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:22 crc kubenswrapper[4902]: I1202 14:20:22.177573 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:20:22 crc kubenswrapper[4902]: I1202 14:20:22.208243 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:23 crc kubenswrapper[4902]: I1202 14:20:23.260288 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:23 crc kubenswrapper[4902]: I1202 14:20:23.353428 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6shgl"] Dec 02 14:20:23 crc kubenswrapper[4902]: I1202 14:20:23.990169 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:23 crc kubenswrapper[4902]: I1202 14:20:23.990318 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.043995 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.157394 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6shgl" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="registry-server" containerID="cri-o://ba88f6884e6a9c252e2c7e293bbe6083b53b124e0939de29905d28b506531ab6" gracePeriod=2 Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.206933 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.828241 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.828301 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:24 crc kubenswrapper[4902]: I1202 14:20:24.886895 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:25 crc kubenswrapper[4902]: I1202 14:20:25.167293 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-77jdb"] Dec 02 14:20:25 crc kubenswrapper[4902]: I1202 14:20:25.168453 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-77jdb" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="registry-server" containerID="cri-o://2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" gracePeriod=2 Dec 02 14:20:25 crc kubenswrapper[4902]: I1202 14:20:25.276855 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:27 crc kubenswrapper[4902]: I1202 14:20:27.559406 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:20:27 crc kubenswrapper[4902]: I1202 14:20:27.559930 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qwhpp" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="registry-server" 
containerID="cri-o://b6a6ebded4979b7fa01c80ad15a81ac98961f83406a0b5ddd454d23089a6e46f" gracePeriod=2 Dec 02 14:20:28 crc kubenswrapper[4902]: I1202 14:20:28.157184 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9tzqn"] Dec 02 14:20:28 crc kubenswrapper[4902]: I1202 14:20:28.157400 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9tzqn" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="registry-server" containerID="cri-o://1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" gracePeriod=2 Dec 02 14:20:28 crc kubenswrapper[4902]: I1202 14:20:28.693122 4902 generic.go:334] "Generic (PLEG): container finished" podID="4128c863-697c-459c-954f-f3226bd690ea" containerID="ba88f6884e6a9c252e2c7e293bbe6083b53b124e0939de29905d28b506531ab6" exitCode=0 Dec 02 14:20:28 crc kubenswrapper[4902]: I1202 14:20:28.693229 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerDied","Data":"ba88f6884e6a9c252e2c7e293bbe6083b53b124e0939de29905d28b506531ab6"} Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.362885 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.540868 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content\") pod \"4128c863-697c-459c-954f-f3226bd690ea\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.540952 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities\") pod \"4128c863-697c-459c-954f-f3226bd690ea\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.540998 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqjlv\" (UniqueName: \"kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv\") pod \"4128c863-697c-459c-954f-f3226bd690ea\" (UID: \"4128c863-697c-459c-954f-f3226bd690ea\") " Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.542419 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities" (OuterVolumeSpecName: "utilities") pod "4128c863-697c-459c-954f-f3226bd690ea" (UID: "4128c863-697c-459c-954f-f3226bd690ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.550505 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv" (OuterVolumeSpecName: "kube-api-access-kqjlv") pod "4128c863-697c-459c-954f-f3226bd690ea" (UID: "4128c863-697c-459c-954f-f3226bd690ea"). InnerVolumeSpecName "kube-api-access-kqjlv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.642058 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.642099 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqjlv\" (UniqueName: \"kubernetes.io/projected/4128c863-697c-459c-954f-f3226bd690ea-kube-api-access-kqjlv\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.704127 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6shgl" event={"ID":"4128c863-697c-459c-954f-f3226bd690ea","Type":"ContainerDied","Data":"4c8547cac2dae081991bc5c732b2c4e48b25511e7cc6e6f725279ffa4279f296"} Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.704179 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6shgl" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.704215 4902 scope.go:117] "RemoveContainer" containerID="ba88f6884e6a9c252e2c7e293bbe6083b53b124e0939de29905d28b506531ab6" Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.709615 4902 generic.go:334] "Generic (PLEG): container finished" podID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerID="b6a6ebded4979b7fa01c80ad15a81ac98961f83406a0b5ddd454d23089a6e46f" exitCode=0 Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.709714 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerDied","Data":"b6a6ebded4979b7fa01c80ad15a81ac98961f83406a0b5ddd454d23089a6e46f"} Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.714254 4902 generic.go:334] "Generic (PLEG): container finished" podID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerID="2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" exitCode=0 Dec 02 14:20:29 crc kubenswrapper[4902]: I1202 14:20:29.714308 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerDied","Data":"2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084"} Dec 02 14:20:30 crc kubenswrapper[4902]: I1202 14:20:30.154785 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4128c863-697c-459c-954f-f3226bd690ea" (UID: "4128c863-697c-459c-954f-f3226bd690ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:30 crc kubenswrapper[4902]: I1202 14:20:30.252488 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4128c863-697c-459c-954f-f3226bd690ea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:30 crc kubenswrapper[4902]: I1202 14:20:30.358189 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6shgl"] Dec 02 14:20:30 crc kubenswrapper[4902]: I1202 14:20:30.365010 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6shgl"] Dec 02 14:20:31 crc kubenswrapper[4902]: I1202 14:20:31.114124 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4128c863-697c-459c-954f-f3226bd690ea" path="/var/lib/kubelet/pods/4128c863-697c-459c-954f-f3226bd690ea/volumes" Dec 02 14:20:31 crc kubenswrapper[4902]: E1202 14:20:31.654453 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084 is running failed: container process not found" containerID="2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:31 crc kubenswrapper[4902]: E1202 14:20:31.655076 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084 is running failed: container process not found" containerID="2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:31 crc kubenswrapper[4902]: E1202 14:20:31.655464 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084 is running failed: container process not found" containerID="2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:31 crc kubenswrapper[4902]: E1202 14:20:31.655517 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-77jdb" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="registry-server" Dec 02 14:20:31 crc kubenswrapper[4902]: I1202 14:20:31.733150 4902 generic.go:334] "Generic (PLEG): container finished" podID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerID="1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" exitCode=0 Dec 02 14:20:31 crc kubenswrapper[4902]: I1202 14:20:31.733252 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerDied","Data":"1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064"} Dec 02 14:20:32 crc kubenswrapper[4902]: I1202 14:20:32.973316 4902 scope.go:117] "RemoveContainer" containerID="f8a08dfba29cd770d17b2de54d845053a49d6b2823c5d2c2d6462efb5d1d45af" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.072526 4902 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.079414 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.138411 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content\") pod \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.139548 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gxcd\" (UniqueName: \"kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd\") pod \"3a1c73ba-580d-4532-b7d7-8e263943bb06\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.139698 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities\") pod \"3a1c73ba-580d-4532-b7d7-8e263943bb06\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.139744 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkbdz\" (UniqueName: \"kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz\") pod \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.139813 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content\") pod \"3a1c73ba-580d-4532-b7d7-8e263943bb06\" (UID: \"3a1c73ba-580d-4532-b7d7-8e263943bb06\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.141194 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities" (OuterVolumeSpecName: "utilities") pod "3a1c73ba-580d-4532-b7d7-8e263943bb06" (UID: "3a1c73ba-580d-4532-b7d7-8e263943bb06"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.147506 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz" (OuterVolumeSpecName: "kube-api-access-kkbdz") pod "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" (UID: "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7"). InnerVolumeSpecName "kube-api-access-kkbdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.147967 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd" (OuterVolumeSpecName: "kube-api-access-2gxcd") pod "3a1c73ba-580d-4532-b7d7-8e263943bb06" (UID: "3a1c73ba-580d-4532-b7d7-8e263943bb06"). InnerVolumeSpecName "kube-api-access-2gxcd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.167031 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" (UID: "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.200744 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a1c73ba-580d-4532-b7d7-8e263943bb06" (UID: "3a1c73ba-580d-4532-b7d7-8e263943bb06"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242262 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities\") pod \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\" (UID: \"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7\") " Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242575 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242590 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gxcd\" (UniqueName: \"kubernetes.io/projected/3a1c73ba-580d-4532-b7d7-8e263943bb06-kube-api-access-2gxcd\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242601 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242612 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkbdz\" (UniqueName: \"kubernetes.io/projected/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-kube-api-access-kkbdz\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.242645 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a1c73ba-580d-4532-b7d7-8e263943bb06-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.243051 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities" (OuterVolumeSpecName: "utilities") pod "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" (UID: "f9a5fd22-f2a6-450e-a1f5-d5787f6368a7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.343675 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.757197 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77jdb" event={"ID":"3a1c73ba-580d-4532-b7d7-8e263943bb06","Type":"ContainerDied","Data":"650fa069a575212a3364e069f763ec0606435103a3a7f509f0f550595f5645b6"} Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.757269 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77jdb" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.762023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qwhpp" event={"ID":"f9a5fd22-f2a6-450e-a1f5-d5787f6368a7","Type":"ContainerDied","Data":"377c353e611731ad8435f2faa8cd17370fb13dcffdeef306e9a775fea6f9098f"} Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.762071 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qwhpp" Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.797705 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.811803 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qwhpp"] Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.814632 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-77jdb"] Dec 02 14:20:33 crc kubenswrapper[4902]: I1202 14:20:33.817197 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-77jdb"] Dec 02 14:20:34 crc kubenswrapper[4902]: E1202 14:20:34.829740 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064 is running failed: container process not found" containerID="1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:34 crc kubenswrapper[4902]: E1202 14:20:34.831112 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064 is running failed: container process not found" containerID="1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:34 crc kubenswrapper[4902]: E1202 14:20:34.831763 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064 is running failed: container process not found" containerID="1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:20:34 crc kubenswrapper[4902]: E1202 14:20:34.831879 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created 
or running: checking if PID of 1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-9tzqn" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="registry-server" Dec 02 14:20:35 crc kubenswrapper[4902]: I1202 14:20:35.117951 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" path="/var/lib/kubelet/pods/3a1c73ba-580d-4532-b7d7-8e263943bb06/volumes" Dec 02 14:20:35 crc kubenswrapper[4902]: I1202 14:20:35.118726 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" path="/var/lib/kubelet/pods/f9a5fd22-f2a6-450e-a1f5-d5787f6368a7/volumes" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.036550 4902 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037304 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da07947e-84ca-4cc6-9e55-f6b99bfca9c6" containerName="pruner" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037326 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="da07947e-84ca-4cc6-9e55-f6b99bfca9c6" containerName="pruner" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037344 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037357 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037380 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="extract-content" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037392 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="extract-content" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037414 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037426 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037441 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037453 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037473 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037486 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037500 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="extract-content" Dec 
02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037514 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="extract-content" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037536 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037549 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037597 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="extract-content" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037610 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="extract-content" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.037631 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037643 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="extract-utilities" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037844 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="da07947e-84ca-4cc6-9e55-f6b99bfca9c6" containerName="pruner" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037872 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="4128c863-697c-459c-954f-f3226bd690ea" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037898 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a5fd22-f2a6-450e-a1f5-d5787f6368a7" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.037912 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a1c73ba-580d-4532-b7d7-8e263943bb06" containerName="registry-server" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038398 4902 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038512 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038834 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b" gracePeriod=15 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038930 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004" gracePeriod=15 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038960 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106" gracePeriod=15 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.038953 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b" gracePeriod=15 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.039223 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676" gracePeriod=15 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040030 4902 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040375 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040394 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040415 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040429 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040441 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040456 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040471 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040485 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040515 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040528 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040545 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040558 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 14:20:39 crc kubenswrapper[4902]: E1202 14:20:39.040625 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040638 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040869 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040887 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040899 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040911 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.040920 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.041151 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.087410 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.114111 4902 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.138989 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.139167 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.139239 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.139330 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.140381 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.140465 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.140494 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.140527 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242155 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242233 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242287 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242288 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242314 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242332 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242377 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242344 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242387 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242424 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242515 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242624 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242643 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242680 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242725 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.242853 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.292382 4902 scope.go:117] "RemoveContainer" containerID="7b2a1de65fadda7cc117c3bf959c2507a77c974e3c6b2537edd26f4f51671d73" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.346946 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.347807 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.381649 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.445322 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities\") pod \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.445523 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content\") pod \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.445597 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqsrs\" (UniqueName: \"kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs\") pod \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\" (UID: \"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2\") " Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.446538 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities" (OuterVolumeSpecName: "utilities") pod "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" (UID: "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.454348 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs" (OuterVolumeSpecName: "kube-api-access-nqsrs") pod "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" (UID: "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2"). InnerVolumeSpecName "kube-api-access-nqsrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.546885 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.547203 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqsrs\" (UniqueName: \"kubernetes.io/projected/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-kube-api-access-nqsrs\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.796839 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" (UID: "fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.808333 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9tzqn" event={"ID":"fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2","Type":"ContainerDied","Data":"2576f93c92aa2c4831de88bf5955c7f8352049152170dc4c905d05e7ab73c0bb"} Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.808422 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9tzqn" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.809425 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.812934 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.814749 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.815445 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b" exitCode=0 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.815469 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676" exitCode=0 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.815480 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004" exitCode=0 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.815489 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106" exitCode=2 Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.822858 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:39 crc kubenswrapper[4902]: I1202 14:20:39.851207 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:40 crc kubenswrapper[4902]: I1202 14:20:40.825500 4902 generic.go:334] "Generic (PLEG): container finished" podID="33461604-9b05-4934-90f2-1642fdd1c40a" containerID="9846b686acb01071f330f33d5872128b19492869e7fc918c05a323c7acab627e" exitCode=0 Dec 02 14:20:40 crc kubenswrapper[4902]: I1202 14:20:40.825595 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"33461604-9b05-4934-90f2-1642fdd1c40a","Type":"ContainerDied","Data":"9846b686acb01071f330f33d5872128b19492869e7fc918c05a323c7acab627e"} Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:40.826895 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: 
connection refused" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:40.827405 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.753741 4902 scope.go:117] "RemoveContainer" containerID="2ed0e85c95797472bbe3856122c454e580b6cd6ab396f40df5e3f5e04aaf8084" Dec 02 14:20:41 crc kubenswrapper[4902]: E1202 14:20:41.759478 4902 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.251:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-t89xz.187d6be0af613546 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-t89xz,UID:caf53037-752b-4821-8e41-81976a0a0554,APIVersion:v1,ResourceVersion:28519,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 22.632s (22.632s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 14:20:41.75833223 +0000 UTC m=+272.949640969,LastTimestamp:2025-12-02 14:20:41.75833223 +0000 UTC m=+272.949640969,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.841772 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"fb0c102052a182f5b49bb1a619faf7e545f2b89dd780e98777d240c235d53cea"} Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.846742 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.852998 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.864830 4902 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b" exitCode=0 Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.864960 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ee2a6dfa377dbe41cb995b7353ed75fe4d26eb2b199b34a804c1f0743484d7d" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.869353 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.873142 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.876258 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.876948 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.877506 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.877775 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.885273 4902 scope.go:117] "RemoveContainer" containerID="1f0ec12e4c1fb79d28cad9f4a04ef5dcb095e2ba406c7d5e39e9333ee16f9314" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.933827 4902 scope.go:117] "RemoveContainer" containerID="28a76e61b8cd97e5f86591f6fb5db562554c9d1394be8de648acbaf9760bb152" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.950386 4902 scope.go:117] "RemoveContainer" containerID="b6a6ebded4979b7fa01c80ad15a81ac98961f83406a0b5ddd454d23089a6e46f" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.962629 4902 scope.go:117] "RemoveContainer" containerID="58635d73c77ce279ceb691ea4df7c1c7bbf577cdb203c42ab02d6e31609ec314" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.978595 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.978707 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.978733 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.978868 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.978902 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.979014 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.979207 4902 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.979229 4902 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.979259 4902 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:41 crc kubenswrapper[4902]: I1202 14:20:41.979846 4902 scope.go:117] "RemoveContainer" containerID="8660c7c3c261e6cc3a08edebb420b30cc0b126459fd0d3df509b2d5a8c6e96e5" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.000682 4902 scope.go:117] "RemoveContainer" containerID="1d233f46761f6b72a84d0d3f21c6beb0e088eae64b7f16ccbebdfdfc53896064" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.020752 4902 scope.go:117] "RemoveContainer" containerID="b3ff5ffd1daba04c4ae0d603f165efb04e8909b196ff2c4638e3708392ed8935" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.044248 4902 scope.go:117] "RemoveContainer" containerID="add58b79d77c262f4e34f258003ac5593059c0afef141ab49281617205bbaf01" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.089226 4902 scope.go:117] "RemoveContainer" containerID="2b7149968fa6eee24955079a51479ad1aa8734d0ed50a5f2fee3f06f1a7fda45" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.119102 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.119658 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.120027 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.120380 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.181525 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock\") pod \"33461604-9b05-4934-90f2-1642fdd1c40a\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.181613 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock" (OuterVolumeSpecName: "var-lock") pod "33461604-9b05-4934-90f2-1642fdd1c40a" (UID: "33461604-9b05-4934-90f2-1642fdd1c40a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.182168 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir\") pod \"33461604-9b05-4934-90f2-1642fdd1c40a\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.182255 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access\") pod \"33461604-9b05-4934-90f2-1642fdd1c40a\" (UID: \"33461604-9b05-4934-90f2-1642fdd1c40a\") " Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.182392 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "33461604-9b05-4934-90f2-1642fdd1c40a" (UID: "33461604-9b05-4934-90f2-1642fdd1c40a"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.182880 4902 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.182904 4902 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/33461604-9b05-4934-90f2-1642fdd1c40a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.188236 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "33461604-9b05-4934-90f2-1642fdd1c40a" (UID: "33461604-9b05-4934-90f2-1642fdd1c40a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.284665 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33461604-9b05-4934-90f2-1642fdd1c40a-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.883369 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.885342 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.886651 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.887057 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.887708 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.890968 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerStarted","Data":"a50d5ef16512d0d88ca5e965cf989f00cd86606a218a98059ba8c41905969b8b"} Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.891618 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 
38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.892076 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.892377 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.892695 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.893807 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerStarted","Data":"4532435f02023fee055c5bd23cc1644c9ab42b9a45e5bb91607b7aba3cbb8e96"} Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.894406 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.894734 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.895029 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.895315 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.895635 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 
14:20:42.897153 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"33461604-9b05-4934-90f2-1642fdd1c40a","Type":"ContainerDied","Data":"5d66aeb49f95b07c0b02eba92075b1b1786a6e0a2f686efe8e80c5bfb45c9057"} Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.897197 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d66aeb49f95b07c0b02eba92075b1b1786a6e0a2f686efe8e80c5bfb45c9057" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.897200 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.900620 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9c5f30507b69fa8d68413f5ee42212cfd6c13f9ed5c6f0fbc52bc56f7606c00f"} Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.901012 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.901357 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.901669 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.902189 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.902556 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.910207 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.910493 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.910815 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.911160 4902 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.911309 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.911463 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.916404 4902 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.916962 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.917147 4902 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.917288 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.917425 4902 status_manager.go:851] "Failed to get status for pod" 
podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:42 crc kubenswrapper[4902]: I1202 14:20:42.917624 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: I1202 14:20:43.113658 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.472878 4902 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.473505 4902 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.474205 4902 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.474496 4902 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.474985 4902 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:43 crc kubenswrapper[4902]: I1202 14:20:43.475041 4902 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.475457 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="200ms" Dec 02 14:20:43 crc kubenswrapper[4902]: E1202 14:20:43.676492 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="400ms" Dec 02 14:20:44 crc kubenswrapper[4902]: E1202 14:20:44.077517 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection 
refused" interval="800ms" Dec 02 14:20:44 crc kubenswrapper[4902]: I1202 14:20:44.354121 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:20:44 crc kubenswrapper[4902]: I1202 14:20:44.354174 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:20:44 crc kubenswrapper[4902]: E1202 14:20:44.878490 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="1.6s" Dec 02 14:20:45 crc kubenswrapper[4902]: I1202 14:20:45.410450 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t89xz" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="registry-server" probeResult="failure" output=< Dec 02 14:20:45 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 14:20:45 crc kubenswrapper[4902]: > Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.479750 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="3.2s" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.768841 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:20:46Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:20:46Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:20:46Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T14:20:46Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:30260b3b9c0e23b61cfe4359af1f463bc579d56c6d58a735735bf9133899e1fa\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:db76f14ddbe9bd8a390f0030a23cbfcb3693b5be7a99737dcb8c75f14bb6a075\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1607829346},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f2a98779239dfafa38d3fb89250a2691f75894c155b5c43fcc421a653bf9273\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:6a549becfb2bf10c272884c5858c442eeaa5b3eb8a726dc460b0a7
9d0164f7ed\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1204220237},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:49722d5b6c5e4dce00fc02377ef6f08c98eda963a9fc778548345ae50c78c808\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:8d6bad38e0a0f7fa6c75d02682ab72fc69250f5b71c633f6181ac9dceb9b7cbe\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201263443},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1
488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-e
xternal-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.769307 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.769915 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.770710 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.771772 4902 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:46 crc kubenswrapper[4902]: E1202 14:20:46.771811 4902 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 14:20:48 crc kubenswrapper[4902]: E1202 14:20:48.824759 4902 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.251:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-t89xz.187d6be0af613546 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-t89xz,UID:caf53037-752b-4821-8e41-81976a0a0554,APIVersion:v1,ResourceVersion:28519,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 22.632s (22.632s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 14:20:41.75833223 +0000 UTC m=+272.949640969,LastTimestamp:2025-12-02 14:20:41.75833223 +0000 UTC m=+272.949640969,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 14:20:49 crc kubenswrapper[4902]: I1202 14:20:49.112826 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:49 crc kubenswrapper[4902]: I1202 14:20:49.113379 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:49 crc kubenswrapper[4902]: I1202 14:20:49.113790 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:49 crc kubenswrapper[4902]: I1202 14:20:49.114273 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:49 crc kubenswrapper[4902]: I1202 14:20:49.115447 4902 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:49 crc kubenswrapper[4902]: E1202 14:20:49.680331 4902 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.251:6443: connect: connection refused" interval="6.4s" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.106207 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.107295 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.107954 4902 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.108274 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.108531 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.108763 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.119768 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.119828 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:50 crc kubenswrapper[4902]: E1202 14:20:50.120550 4902 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.121383 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.949667 4902 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="9652f19567ddcccc52cee9c33af4d50997b52e34b5481d525cdcd72061fb9969" exitCode=0 Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.949948 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"9652f19567ddcccc52cee9c33af4d50997b52e34b5481d525cdcd72061fb9969"} Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.949979 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"04bc5a708ef126f0e3d484cad60727dbe684d7a56d2373dae1aeab9be5361599"} Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.950272 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.950289 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:50 crc kubenswrapper[4902]: E1202 14:20:50.950740 4902 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.950740 4902 status_manager.go:851] "Failed to get status for pod" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.951170 4902 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.951432 4902 status_manager.go:851] "Failed to get status for pod" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" pod="openshift-marketplace/redhat-operators-9tzqn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9tzqn\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.951846 4902 status_manager.go:851] "Failed to get status for pod" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" pod="openshift-marketplace/community-operators-cg77b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-cg77b\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:50 crc kubenswrapper[4902]: I1202 14:20:50.952277 4902 status_manager.go:851] "Failed to get status for pod" podUID="caf53037-752b-4821-8e41-81976a0a0554" pod="openshift-marketplace/redhat-operators-t89xz" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t89xz\": dial tcp 38.102.83.251:6443: connect: connection refused" Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.480901 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.481268 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.519589 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.958471 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d71bb9104b69ef6ced64cfee9e4bcecf5edea98ad2bf4d1ae175bc357ae9e729"} Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.958860 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4c062b05777cfcb68b9f5b197147867c05365779232bbe222ab55de4f3ec2049"} Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.958872 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6a3d270f8004ae70de3b4b8f985a8d229f2eb8960e39a4f9026920afebd4fb50"} Dec 02 14:20:51 crc kubenswrapper[4902]: I1202 14:20:51.958881 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a46e302d275c001edc8a8535193ad37669f4928405708dea9c583c639ac66738"} Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.012495 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.964405 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.964454 4902 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e" exitCode=1 Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.964513 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e"} Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.964970 4902 scope.go:117] "RemoveContainer" containerID="1c75a09cfb07684fb45c15c36f72db35383a3d50256c9c2bb3ea4a31fe0cc14e" Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.967987 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"aa695c19cb9b309c564cf7bb418b740711bd9eae152ba7cfcb1ae3b19784877d"} Dec 02 14:20:52 crc 
kubenswrapper[4902]: I1202 14:20:52.968303 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:52 crc kubenswrapper[4902]: I1202 14:20:52.968345 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:53 crc kubenswrapper[4902]: I1202 14:20:53.975801 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 14:20:53 crc kubenswrapper[4902]: I1202 14:20:53.976138 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4f3193ede7a4e50f2441462c75023a40e0bdb97263c699360b621ee4d3db1753"} Dec 02 14:20:54 crc kubenswrapper[4902]: I1202 14:20:54.397394 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:20:54 crc kubenswrapper[4902]: I1202 14:20:54.432856 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:20:55 crc kubenswrapper[4902]: I1202 14:20:55.127712 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:55 crc kubenswrapper[4902]: I1202 14:20:55.128143 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:55 crc kubenswrapper[4902]: I1202 14:20:55.135844 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:55 crc kubenswrapper[4902]: I1202 14:20:55.154701 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.414677 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.418268 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.976182 4902 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.997354 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.997448 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:57 crc kubenswrapper[4902]: I1202 14:20:57.997477 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc" Dec 02 14:20:58 crc kubenswrapper[4902]: I1202 14:20:58.001243 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 14:20:59 crc kubenswrapper[4902]: I1202 14:20:59.002453 
4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:20:59 crc kubenswrapper[4902]: I1202 14:20:59.002483 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:20:59 crc kubenswrapper[4902]: I1202 14:20:59.120766 4902 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9a3a507d-a9f4-45fe-931a-1b0639d41cbb"
Dec 02 14:21:00 crc kubenswrapper[4902]: I1202 14:21:00.008299 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:21:00 crc kubenswrapper[4902]: I1202 14:21:00.008346 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:21:00 crc kubenswrapper[4902]: I1202 14:21:00.013633 4902 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9a3a507d-a9f4-45fe-931a-1b0639d41cbb"
Dec 02 14:21:05 crc kubenswrapper[4902]: I1202 14:21:05.160913 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 14:21:05 crc kubenswrapper[4902]: I1202 14:21:05.700285 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 02 14:21:05 crc kubenswrapper[4902]: I1202 14:21:05.799354 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 02 14:21:05 crc kubenswrapper[4902]: I1202 14:21:05.986178 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 02 14:21:05 crc kubenswrapper[4902]: I1202 14:21:05.988396 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 02 14:21:06 crc kubenswrapper[4902]: I1202 14:21:06.506053 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 02 14:21:07 crc kubenswrapper[4902]: I1202 14:21:07.905866 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 02 14:21:08 crc kubenswrapper[4902]: I1202 14:21:08.204487 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 02 14:21:08 crc kubenswrapper[4902]: I1202 14:21:08.973700 4902 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Dec 02 14:21:09 crc kubenswrapper[4902]: I1202 14:21:09.703004 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 02 14:21:09 crc kubenswrapper[4902]: I1202 14:21:09.745431 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 02 14:21:09 crc kubenswrapper[4902]: I1202 14:21:09.795913 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.006064 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.080329 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.105524 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.350064 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.437800 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.516798 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.751106 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.798608 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.823024 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 02 14:21:10 crc kubenswrapper[4902]: I1202 14:21:10.990424 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.308399 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.425857 4902 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.427322 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t89xz" podStartSLOduration=51.067884218 podStartE2EDuration="2m17.427299557s" podCreationTimestamp="2025-12-02 14:18:54 +0000 UTC" firstStartedPulling="2025-12-02 14:19:15.39890029 +0000 UTC m=+186.590208999" lastFinishedPulling="2025-12-02 14:20:41.758315599 +0000 UTC m=+272.949624338" observedRunningTime="2025-12-02 14:20:57.863420445 +0000 UTC m=+289.054729174" watchObservedRunningTime="2025-12-02 14:21:11.427299557 +0000 UTC m=+302.618608306"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.430012 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cg77b" podStartSLOduration=32.951692026 podStartE2EDuration="2m20.429998625s" podCreationTimestamp="2025-12-02 14:18:51 +0000 UTC" firstStartedPulling="2025-12-02 14:18:54.315871711 +0000 UTC m=+165.507180420" lastFinishedPulling="2025-12-02 14:20:41.79417827 +0000 UTC m=+272.985487019" observedRunningTime="2025-12-02 14:20:57.829952083 +0000 UTC m=+289.021260802" watchObservedRunningTime="2025-12-02 14:21:11.429998625 +0000 UTC m=+302.621307374"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.431177 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=32.431169038 podStartE2EDuration="32.431169038s" podCreationTimestamp="2025-12-02 14:20:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:20:57.786658328 +0000 UTC m=+288.977967057" watchObservedRunningTime="2025-12-02 14:21:11.431169038 +0000 UTC m=+302.622477777"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.433504 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9tzqn","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.433623 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.434428 4902 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.434477 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="81d41cbd-844a-417a-908f-1c7861a696cc"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.441395 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.472913 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=14.472882428 podStartE2EDuration="14.472882428s" podCreationTimestamp="2025-12-02 14:20:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:21:11.467240085 +0000 UTC m=+302.658548834" watchObservedRunningTime="2025-12-02 14:21:11.472882428 +0000 UTC m=+302.664191207"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.770675 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.803863 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 02 14:21:11 crc kubenswrapper[4902]: I1202 14:21:11.967524 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.016300 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.025090 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.111524 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.132185 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.272856 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.341354 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.433387 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.637039 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.842354 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.849206 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 02 14:21:12 crc kubenswrapper[4902]: I1202 14:21:12.976456 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.101045 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.109362 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.121665 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" path="/var/lib/kubelet/pods/fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2/volumes"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.135192 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.250687 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.357963 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.402058 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.519672 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.526017 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.548022 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.593966 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.656685 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.787861 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.792515 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.804668 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 02 14:21:13 crc kubenswrapper[4902]: I1202 14:21:13.889300 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.016811 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.069471 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.103517 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.126787 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.180688 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.212054 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.288319 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.699085 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.845367 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.883624 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 02 14:21:14 crc kubenswrapper[4902]: I1202 14:21:14.885961 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.039552 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.051736 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.062953 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.075015 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.115011 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.265840 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.352654 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.459180 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.496131 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.537830 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.546225 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.561366 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.578826 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.586061 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.595816 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.641106 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.669927 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.682043 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.713069 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.842204 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.920771 4902 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 02 14:21:15 crc kubenswrapper[4902]: I1202 14:21:15.928336 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.128306 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.177110 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.200760 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.203216 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.326821 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.483915 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.536126 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.542396 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.639089 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.655518 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.727689 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.734547 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.756675 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.764454 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.780393 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.807129 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.842129 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 02 14:21:16 crc kubenswrapper[4902]: I1202 14:21:16.937649 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.151836 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.231704 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.258425 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.313995 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.335493 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.443300 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.527780 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.585532 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.619479 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.724835 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.729254 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.756136 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.881205 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 02 14:21:17 crc kubenswrapper[4902]: I1202 14:21:17.931448 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.089159 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.170703 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.236593 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.300206 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.305133 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.413028 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.433833 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.450623 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.451404 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.608430 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.638886 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.793143 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.879613 4902 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 02 14:21:18 crc kubenswrapper[4902]: I1202 14:21:18.988053 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.006146 4902 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.056869 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.062263 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.081964 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.098230 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.115603 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.140277 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.196008 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.219022 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.243864 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.307657 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.322340 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.545832 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.562529 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.643089 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.645985 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.670889 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.780543 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 02 14:21:19 crc kubenswrapper[4902]: I1202 14:21:19.997633 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.080098 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.140010 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.140284 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.140512 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.204356 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.209831 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.309343 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.402130 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.445405 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.477217 4902 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.477474 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://9c5f30507b69fa8d68413f5ee42212cfd6c13f9ed5c6f0fbc52bc56f7606c00f" gracePeriod=5
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.486188 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.540724 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.600087 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.667816 4902 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.680207 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.685105 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.720340 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.746073 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.797255 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.798495 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 02 14:21:20 crc kubenswrapper[4902]: I1202 14:21:20.815347 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.001996 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.044375 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.050169 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.069776 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.220386 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.270266 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.288866 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.293303 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.325757 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.363907 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.383692 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.392100 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.459628 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.519344 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.561017 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.599895 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.889492 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.891715 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 02 14:21:21 crc kubenswrapper[4902]: I1202 14:21:21.899194 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.038732 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.071011 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.145672 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.225115 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.315151 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.348114 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.566007 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.568007 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.592860 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.653251 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.670070 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.671918 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.680532 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.765601 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.808731 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 02 14:21:22 crc kubenswrapper[4902]: I1202 14:21:22.926644 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.004433 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.059860 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.139028 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.192450 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.221190 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.445341 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.535992 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.570060 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.574687 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.656230 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.784127 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.801895 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.840180 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.910468 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 02 14:21:23 crc kubenswrapper[4902]: I1202 14:21:23.991777 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.173759 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.174188 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.239230 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.397467 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.409076 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.410783 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.413732 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.469053 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.504487 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.525723 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.633493 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.647667 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.681296 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.714509 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.767704 4902 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.819544 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 02 14:21:24 crc kubenswrapper[4902]: I1202 14:21:24.927345 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.271490 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.317274 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.318962 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.447553 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.509499 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.568269 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.608867 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 02 14:21:25 crc kubenswrapper[4902]: I1202 14:21:25.773235 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.058401 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.160672 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.160731 4902 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="9c5f30507b69fa8d68413f5ee42212cfd6c13f9ed5c6f0fbc52bc56f7606c00f" exitCode=137
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.273075 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.279007 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.383159 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.498707 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.687457 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.687554 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812009 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812072 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812097 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812155 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812161 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812217 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812231 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812488 4902 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812506 4902 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812591 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.812630 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.819875 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.913243 4902 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.913287 4902 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.913300 4902 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 02 14:21:26 crc kubenswrapper[4902]: I1202 14:21:26.943922 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.118290 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.119648 4902 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.135305 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.135375 4902 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="efb778fa-8fcc-4066-8cec-b3157d0fa531"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.142638 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.142683 4902 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="efb778fa-8fcc-4066-8cec-b3157d0fa531"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.166990 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.167062 4902 scope.go:117] "RemoveContainer" containerID="9c5f30507b69fa8d68413f5ee42212cfd6c13f9ed5c6f0fbc52bc56f7606c00f"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.167136 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 14:21:27 crc kubenswrapper[4902]: I1202 14:21:27.971220 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 02 14:21:28 crc kubenswrapper[4902]: I1202 14:21:28.296928 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.276926 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"]
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.277919 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4wsv4" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="registry-server" containerID="cri-o://f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" gracePeriod=30
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.283281 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg77b"]
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.283635 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cg77b" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="registry-server" containerID="cri-o://a50d5ef16512d0d88ca5e965cf989f00cd86606a218a98059ba8c41905969b8b" gracePeriod=30
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.291424 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"]
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.291664 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" podUID="009e1183-d309-4988-b20c-54c056613558" containerName="marketplace-operator" containerID="cri-o://bd82e8e7456dd61aae0d1a95226c70d19f5f97300fcb0eb992c4898f3f807ad0" gracePeriod=30
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.295354 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"]
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.295514 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4f746" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="registry-server" containerID="cri-o://bb7441fdff7728b078fb0a70f053a16da19513cb09c442a161767f8ea27cff7f" gracePeriod=30
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.298908 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"]
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.299217 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t89xz" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="registry-server" containerID="cri-o://4532435f02023fee055c5bd23cc1644c9ab42b9a45e5bb91607b7aba3cbb8e96" gracePeriod=30
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330521 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vp2bb"]
Dec 02 14:21:38 crc kubenswrapper[4902]: E1202 14:21:38.330781 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330797 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 14:21:38 crc kubenswrapper[4902]: E1202 14:21:38.330809 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="extract-utilities"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330818 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="extract-utilities"
Dec 02 14:21:38 crc kubenswrapper[4902]: E1202 14:21:38.330830 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="extract-content"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330838 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="extract-content"
Dec 02 14:21:38 crc kubenswrapper[4902]: E1202 14:21:38.330851 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="registry-server"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330859 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="registry-server"
Dec 02 14:21:38 crc kubenswrapper[4902]: E1202 14:21:38.330872 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" containerName="installer"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330880 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" containerName="installer"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.330996 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa4e35f6-253d-41cd-a8b1-bd4d30cbb1b2" containerName="registry-server"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.331016 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="33461604-9b05-4934-90f2-1642fdd1c40a" containerName="installer"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.331023 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.331783 4902 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.365271 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.365335 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lshcx\" (UniqueName: \"kubernetes.io/projected/bfa7e05a-f673-4432-a9c5-e33f67589a10-kube-api-access-lshcx\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.365365 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.394444 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vp2bb"] Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.466864 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.466927 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lshcx\" (UniqueName: \"kubernetes.io/projected/bfa7e05a-f673-4432-a9c5-e33f67589a10-kube-api-access-lshcx\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.466960 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.469703 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.472458 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/bfa7e05a-f673-4432-a9c5-e33f67589a10-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.483269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lshcx\" (UniqueName: \"kubernetes.io/projected/bfa7e05a-f673-4432-a9c5-e33f67589a10-kube-api-access-lshcx\") pod \"marketplace-operator-79b997595-vp2bb\" (UID: \"bfa7e05a-f673-4432-a9c5-e33f67589a10\") " pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:38 crc kubenswrapper[4902]: I1202 14:21:38.735982 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:39 crc kubenswrapper[4902]: I1202 14:21:39.131917 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vp2bb"] Dec 02 14:21:39 crc kubenswrapper[4902]: W1202 14:21:39.139535 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfa7e05a_f673_4432_a9c5_e33f67589a10.slice/crio-336e0a922a16a55069e537469a928dac444ee91f3d73374c8a2fc306ebda63b8 WatchSource:0}: Error finding container 336e0a922a16a55069e537469a928dac444ee91f3d73374c8a2fc306ebda63b8: Status 404 returned error can't find the container with id 336e0a922a16a55069e537469a928dac444ee91f3d73374c8a2fc306ebda63b8 Dec 02 14:21:39 crc kubenswrapper[4902]: I1202 14:21:39.248501 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" event={"ID":"bfa7e05a-f673-4432-a9c5-e33f67589a10","Type":"ContainerStarted","Data":"336e0a922a16a55069e537469a928dac444ee91f3d73374c8a2fc306ebda63b8"} Dec 02 14:21:39 crc kubenswrapper[4902]: I1202 14:21:39.250460 4902 generic.go:334] "Generic (PLEG): container finished" podID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerID="a50d5ef16512d0d88ca5e965cf989f00cd86606a218a98059ba8c41905969b8b" exitCode=0 Dec 02 14:21:39 crc kubenswrapper[4902]: I1202 14:21:39.250492 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerDied","Data":"a50d5ef16512d0d88ca5e965cf989f00cd86606a218a98059ba8c41905969b8b"} Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.258147 4902 generic.go:334] "Generic (PLEG): container finished" podID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerID="f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" exitCode=0 Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.258221 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerDied","Data":"f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7"} Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.260007 4902 generic.go:334] "Generic (PLEG): container finished" podID="009e1183-d309-4988-b20c-54c056613558" containerID="bd82e8e7456dd61aae0d1a95226c70d19f5f97300fcb0eb992c4898f3f807ad0" exitCode=0 Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.260083 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" 
event={"ID":"009e1183-d309-4988-b20c-54c056613558","Type":"ContainerDied","Data":"bd82e8e7456dd61aae0d1a95226c70d19f5f97300fcb0eb992c4898f3f807ad0"} Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.262525 4902 generic.go:334] "Generic (PLEG): container finished" podID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerID="bb7441fdff7728b078fb0a70f053a16da19513cb09c442a161767f8ea27cff7f" exitCode=0 Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.262584 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerDied","Data":"bb7441fdff7728b078fb0a70f053a16da19513cb09c442a161767f8ea27cff7f"} Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.797733 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.896102 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crjqv\" (UniqueName: \"kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv\") pod \"ab032431-54e6-42b3-b682-8c18f5f29f81\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.896211 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities\") pod \"ab032431-54e6-42b3-b682-8c18f5f29f81\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.896265 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content\") pod \"ab032431-54e6-42b3-b682-8c18f5f29f81\" (UID: \"ab032431-54e6-42b3-b682-8c18f5f29f81\") " Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.897165 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities" (OuterVolumeSpecName: "utilities") pod "ab032431-54e6-42b3-b682-8c18f5f29f81" (UID: "ab032431-54e6-42b3-b682-8c18f5f29f81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.901034 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv" (OuterVolumeSpecName: "kube-api-access-crjqv") pod "ab032431-54e6-42b3-b682-8c18f5f29f81" (UID: "ab032431-54e6-42b3-b682-8c18f5f29f81"). InnerVolumeSpecName "kube-api-access-crjqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.962616 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab032431-54e6-42b3-b682-8c18f5f29f81" (UID: "ab032431-54e6-42b3-b682-8c18f5f29f81"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.997588 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crjqv\" (UniqueName: \"kubernetes.io/projected/ab032431-54e6-42b3-b682-8c18f5f29f81-kube-api-access-crjqv\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.997617 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:40 crc kubenswrapper[4902]: I1202 14:21:40.997627 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab032431-54e6-42b3-b682-8c18f5f29f81-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.041872 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.098484 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8nk2\" (UniqueName: \"kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2\") pod \"009e1183-d309-4988-b20c-54c056613558\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.098581 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics\") pod \"009e1183-d309-4988-b20c-54c056613558\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.098684 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca\") pod \"009e1183-d309-4988-b20c-54c056613558\" (UID: \"009e1183-d309-4988-b20c-54c056613558\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.100316 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "009e1183-d309-4988-b20c-54c056613558" (UID: "009e1183-d309-4988-b20c-54c056613558"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.103520 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "009e1183-d309-4988-b20c-54c056613558" (UID: "009e1183-d309-4988-b20c-54c056613558"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.104113 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2" (OuterVolumeSpecName: "kube-api-access-n8nk2") pod "009e1183-d309-4988-b20c-54c056613558" (UID: "009e1183-d309-4988-b20c-54c056613558"). InnerVolumeSpecName "kube-api-access-n8nk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.109307 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.200401 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content\") pod \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.200470 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities\") pod \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.200703 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgq8p\" (UniqueName: \"kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p\") pod \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\" (UID: \"1467a1b3-3c32-4c86-a5ee-5e0f6f112519\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.200975 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8nk2\" (UniqueName: \"kubernetes.io/projected/009e1183-d309-4988-b20c-54c056613558-kube-api-access-n8nk2\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.201001 4902 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/009e1183-d309-4988-b20c-54c056613558-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.201017 4902 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/009e1183-d309-4988-b20c-54c056613558-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.201526 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities" (OuterVolumeSpecName: "utilities") pod "1467a1b3-3c32-4c86-a5ee-5e0f6f112519" (UID: "1467a1b3-3c32-4c86-a5ee-5e0f6f112519"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.207231 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p" (OuterVolumeSpecName: "kube-api-access-sgq8p") pod "1467a1b3-3c32-4c86-a5ee-5e0f6f112519" (UID: "1467a1b3-3c32-4c86-a5ee-5e0f6f112519"). InnerVolumeSpecName "kube-api-access-sgq8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.222311 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1467a1b3-3c32-4c86-a5ee-5e0f6f112519" (UID: "1467a1b3-3c32-4c86-a5ee-5e0f6f112519"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.270821 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg77b" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.271054 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg77b" event={"ID":"ab032431-54e6-42b3-b682-8c18f5f29f81","Type":"ContainerDied","Data":"dfb9ff3c9c4aee50e8a0cd8980ba51d9d089d9a6c6d2417332cf5a1e16054514"} Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.271160 4902 scope.go:117] "RemoveContainer" containerID="a50d5ef16512d0d88ca5e965cf989f00cd86606a218a98059ba8c41905969b8b" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.277291 4902 generic.go:334] "Generic (PLEG): container finished" podID="caf53037-752b-4821-8e41-81976a0a0554" containerID="4532435f02023fee055c5bd23cc1644c9ab42b9a45e5bb91607b7aba3cbb8e96" exitCode=0 Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.277391 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerDied","Data":"4532435f02023fee055c5bd23cc1644c9ab42b9a45e5bb91607b7aba3cbb8e96"} Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.279609 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.280118 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ghgfc" event={"ID":"009e1183-d309-4988-b20c-54c056613558","Type":"ContainerDied","Data":"d20326ebe7eb01d35afbe916c0e3fb59d56949243ce12076e30683d9553669e6"} Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.286993 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" event={"ID":"bfa7e05a-f673-4432-a9c5-e33f67589a10","Type":"ContainerStarted","Data":"520ee73ffac0f5e1e5d70df99349bb3795e0e3b05cde19246172dba2e28c0bc1"} Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.287381 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.287940 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg77b"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.291656 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.292043 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4f746" event={"ID":"1467a1b3-3c32-4c86-a5ee-5e0f6f112519","Type":"ContainerDied","Data":"fccb285e25edc36d08bc42e627fb71094bb9f4c45bd0c161244754ae947b1288"} Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.292236 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4f746" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.292394 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cg77b"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.302163 4902 scope.go:117] "RemoveContainer" containerID="4ccd02d97c770b3dbf55aeccf9b85baaebbb6294cd2686ea11f3ab621d6f3c1b" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.304613 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.304721 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgq8p\" (UniqueName: \"kubernetes.io/projected/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-kube-api-access-sgq8p\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.304756 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.304770 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1467a1b3-3c32-4c86-a5ee-5e0f6f112519-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.312070 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ghgfc"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.314949 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vp2bb" podStartSLOduration=3.314932782 podStartE2EDuration="3.314932782s" podCreationTimestamp="2025-12-02 14:21:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:21:41.313863621 +0000 UTC m=+332.505172350" watchObservedRunningTime="2025-12-02 14:21:41.314932782 +0000 UTC m=+332.506241491" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.323326 4902 scope.go:117] "RemoveContainer" containerID="b3b764d182c126246007b2677b38c27d36932dbfa3bde3a68b497a535f6a7d31" Dec 02 14:21:41 crc kubenswrapper[4902]: E1202 14:21:41.329148 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7 is running failed: container process not found" containerID="f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:21:41 crc kubenswrapper[4902]: E1202 14:21:41.329550 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7 is running failed: container process not found" containerID="f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:21:41 crc kubenswrapper[4902]: E1202 14:21:41.329775 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7 is running failed: container process not found" containerID="f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 14:21:41 crc kubenswrapper[4902]: E1202 14:21:41.329801 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-4wsv4" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="registry-server" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.345637 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.348747 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4f746"] Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.360594 4902 scope.go:117] "RemoveContainer" containerID="bd82e8e7456dd61aae0d1a95226c70d19f5f97300fcb0eb992c4898f3f807ad0" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.373586 4902 scope.go:117] "RemoveContainer" containerID="bb7441fdff7728b078fb0a70f053a16da19513cb09c442a161767f8ea27cff7f" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.385612 4902 scope.go:117] "RemoveContainer" containerID="68ae2ce6672dc19411cfa45472c584d3151f10e4de9edd9a9a9c0c56024b6fc3" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.403945 4902 scope.go:117] "RemoveContainer" containerID="0fbb991ebaf715d2fbc0c9d4c239fc2f4ac92adc3488ec6406687b0c4b3ae331" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.569387 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.609002 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content\") pod \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.609071 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f479k\" (UniqueName: \"kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k\") pod \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.609110 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities\") pod \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\" (UID: \"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.611027 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities" (OuterVolumeSpecName: "utilities") pod "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" (UID: "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.613884 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k" (OuterVolumeSpecName: "kube-api-access-f479k") pod "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" (UID: "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6"). InnerVolumeSpecName "kube-api-access-f479k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.639518 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.672354 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" (UID: "6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711062 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ktd7\" (UniqueName: \"kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7\") pod \"caf53037-752b-4821-8e41-81976a0a0554\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711120 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content\") pod \"caf53037-752b-4821-8e41-81976a0a0554\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711155 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities\") pod \"caf53037-752b-4821-8e41-81976a0a0554\" (UID: \"caf53037-752b-4821-8e41-81976a0a0554\") " Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711372 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711387 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f479k\" (UniqueName: \"kubernetes.io/projected/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-kube-api-access-f479k\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.711401 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.712111 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities" (OuterVolumeSpecName: "utilities") pod "caf53037-752b-4821-8e41-81976a0a0554" (UID: "caf53037-752b-4821-8e41-81976a0a0554"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.713976 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7" (OuterVolumeSpecName: "kube-api-access-7ktd7") pod "caf53037-752b-4821-8e41-81976a0a0554" (UID: "caf53037-752b-4821-8e41-81976a0a0554"). InnerVolumeSpecName "kube-api-access-7ktd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.813501 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ktd7\" (UniqueName: \"kubernetes.io/projected/caf53037-752b-4821-8e41-81976a0a0554-kube-api-access-7ktd7\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.813768 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.824491 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "caf53037-752b-4821-8e41-81976a0a0554" (UID: "caf53037-752b-4821-8e41-81976a0a0554"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:21:41 crc kubenswrapper[4902]: I1202 14:21:41.914541 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caf53037-752b-4821-8e41-81976a0a0554-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.304830 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t89xz" event={"ID":"caf53037-752b-4821-8e41-81976a0a0554","Type":"ContainerDied","Data":"a0cc29a23d2d1d9692481fd7f74a9f250548900fc75913c95c5c21bcf53058eb"} Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.304883 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t89xz" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.304912 4902 scope.go:117] "RemoveContainer" containerID="4532435f02023fee055c5bd23cc1644c9ab42b9a45e5bb91607b7aba3cbb8e96" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.307240 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wsv4" event={"ID":"6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6","Type":"ContainerDied","Data":"ba75cc2e9ce88ba42bc89f33f32fe6c7b20a1c03cc42b021d0cc86653e0aa51f"} Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.307269 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4wsv4" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.319866 4902 scope.go:117] "RemoveContainer" containerID="1d12d3e933230fd6e420928974c58408fa4af15c63d334878f6df8c53ba00d4c" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.339772 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"] Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.345838 4902 scope.go:117] "RemoveContainer" containerID="3784fa7e88a204296515823395e6f7335a33353b3619c0e6af55527f65f1c6f0" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.351851 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4wsv4"] Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.366785 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"] Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.371073 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t89xz"] Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.376908 4902 scope.go:117] "RemoveContainer" containerID="f81fbd73c3996cb02e7e6af08328374e26b5a529abb56fdb3c320a3431aa23b7" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.392791 4902 scope.go:117] "RemoveContainer" containerID="ddb04958cd0ccc60a34ec5a454ff24fa0a64c0e44a3b7ee397cc6db2fb312a77" Dec 02 14:21:42 crc kubenswrapper[4902]: I1202 14:21:42.412203 4902 scope.go:117] "RemoveContainer" containerID="01274fcf0cdf9ca5514b1e7bcfd1511be0c3904e921aa6482f1301da0a6bd104" Dec 02 14:21:43 crc kubenswrapper[4902]: I1202 14:21:43.118411 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="009e1183-d309-4988-b20c-54c056613558" path="/var/lib/kubelet/pods/009e1183-d309-4988-b20c-54c056613558/volumes" Dec 02 14:21:43 crc kubenswrapper[4902]: I1202 14:21:43.119262 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" path="/var/lib/kubelet/pods/1467a1b3-3c32-4c86-a5ee-5e0f6f112519/volumes" Dec 02 14:21:43 crc kubenswrapper[4902]: I1202 14:21:43.119953 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" path="/var/lib/kubelet/pods/6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6/volumes" Dec 02 14:21:43 crc kubenswrapper[4902]: I1202 14:21:43.121578 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" path="/var/lib/kubelet/pods/ab032431-54e6-42b3-b682-8c18f5f29f81/volumes" Dec 02 14:21:43 crc kubenswrapper[4902]: I1202 14:21:43.122349 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caf53037-752b-4821-8e41-81976a0a0554" path="/var/lib/kubelet/pods/caf53037-752b-4821-8e41-81976a0a0554/volumes" Dec 02 14:21:56 crc kubenswrapper[4902]: I1202 14:21:56.618355 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:21:56 crc kubenswrapper[4902]: I1202 14:21:56.619220 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" podUID="6244913f-facc-4fe2-be8c-09238b2704d6" containerName="controller-manager" containerID="cri-o://7e9bdaf49bbb782053efa12c6f725ec71513a1856b581c37b2bd94ca86c7cbdd" gracePeriod=30 Dec 02 14:21:56 crc kubenswrapper[4902]: I1202 14:21:56.715101 
4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:21:56 crc kubenswrapper[4902]: I1202 14:21:56.715344 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" podUID="f61a29cb-55b4-42b7-817f-67600cb70980" containerName="route-controller-manager" containerID="cri-o://0d2437a5f93079b036c105e29106620a3c6bdd840882fbc69ee6c2eb9290f441" gracePeriod=30 Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.401766 4902 generic.go:334] "Generic (PLEG): container finished" podID="6244913f-facc-4fe2-be8c-09238b2704d6" containerID="7e9bdaf49bbb782053efa12c6f725ec71513a1856b581c37b2bd94ca86c7cbdd" exitCode=0 Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.401892 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" event={"ID":"6244913f-facc-4fe2-be8c-09238b2704d6","Type":"ContainerDied","Data":"7e9bdaf49bbb782053efa12c6f725ec71513a1856b581c37b2bd94ca86c7cbdd"} Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.409785 4902 generic.go:334] "Generic (PLEG): container finished" podID="f61a29cb-55b4-42b7-817f-67600cb70980" containerID="0d2437a5f93079b036c105e29106620a3c6bdd840882fbc69ee6c2eb9290f441" exitCode=0 Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.409840 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" event={"ID":"f61a29cb-55b4-42b7-817f-67600cb70980","Type":"ContainerDied","Data":"0d2437a5f93079b036c105e29106620a3c6bdd840882fbc69ee6c2eb9290f441"} Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.545981 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.601283 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.614964 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca\") pod \"6244913f-facc-4fe2-be8c-09238b2704d6\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.615120 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv2pn\" (UniqueName: \"kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn\") pod \"6244913f-facc-4fe2-be8c-09238b2704d6\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.615177 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config\") pod \"6244913f-facc-4fe2-be8c-09238b2704d6\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.615254 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles\") pod \"6244913f-facc-4fe2-be8c-09238b2704d6\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.615280 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert\") pod \"6244913f-facc-4fe2-be8c-09238b2704d6\" (UID: \"6244913f-facc-4fe2-be8c-09238b2704d6\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.615954 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca" (OuterVolumeSpecName: "client-ca") pod "6244913f-facc-4fe2-be8c-09238b2704d6" (UID: "6244913f-facc-4fe2-be8c-09238b2704d6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.616021 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6244913f-facc-4fe2-be8c-09238b2704d6" (UID: "6244913f-facc-4fe2-be8c-09238b2704d6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.616103 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config" (OuterVolumeSpecName: "config") pod "6244913f-facc-4fe2-be8c-09238b2704d6" (UID: "6244913f-facc-4fe2-be8c-09238b2704d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.623594 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6244913f-facc-4fe2-be8c-09238b2704d6" (UID: "6244913f-facc-4fe2-be8c-09238b2704d6"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.625096 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn" (OuterVolumeSpecName: "kube-api-access-bv2pn") pod "6244913f-facc-4fe2-be8c-09238b2704d6" (UID: "6244913f-facc-4fe2-be8c-09238b2704d6"). InnerVolumeSpecName "kube-api-access-bv2pn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716340 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca\") pod \"f61a29cb-55b4-42b7-817f-67600cb70980\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716460 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert\") pod \"f61a29cb-55b4-42b7-817f-67600cb70980\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716512 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n9wm\" (UniqueName: \"kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm\") pod \"f61a29cb-55b4-42b7-817f-67600cb70980\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716549 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config\") pod \"f61a29cb-55b4-42b7-817f-67600cb70980\" (UID: \"f61a29cb-55b4-42b7-817f-67600cb70980\") " Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716901 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv2pn\" (UniqueName: \"kubernetes.io/projected/6244913f-facc-4fe2-be8c-09238b2704d6-kube-api-access-bv2pn\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716932 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716950 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716965 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6244913f-facc-4fe2-be8c-09238b2704d6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.716977 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6244913f-facc-4fe2-be8c-09238b2704d6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.717284 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca" (OuterVolumeSpecName: "client-ca") pod "f61a29cb-55b4-42b7-817f-67600cb70980" (UID: 
"f61a29cb-55b4-42b7-817f-67600cb70980"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.717559 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config" (OuterVolumeSpecName: "config") pod "f61a29cb-55b4-42b7-817f-67600cb70980" (UID: "f61a29cb-55b4-42b7-817f-67600cb70980"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.720683 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f61a29cb-55b4-42b7-817f-67600cb70980" (UID: "f61a29cb-55b4-42b7-817f-67600cb70980"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.720690 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm" (OuterVolumeSpecName: "kube-api-access-2n9wm") pod "f61a29cb-55b4-42b7-817f-67600cb70980" (UID: "f61a29cb-55b4-42b7-817f-67600cb70980"). InnerVolumeSpecName "kube-api-access-2n9wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.818883 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.818961 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f61a29cb-55b4-42b7-817f-67600cb70980-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.818999 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n9wm\" (UniqueName: \"kubernetes.io/projected/f61a29cb-55b4-42b7-817f-67600cb70980-kube-api-access-2n9wm\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:57 crc kubenswrapper[4902]: I1202 14:21:57.819035 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f61a29cb-55b4-42b7-817f-67600cb70980-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.417930 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" event={"ID":"6244913f-facc-4fe2-be8c-09238b2704d6","Type":"ContainerDied","Data":"1ead538245c8680cce8c10772268cd100641c05c59a7f3f21f6ce7c2c5259963"} Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.417984 4902 scope.go:117] "RemoveContainer" containerID="7e9bdaf49bbb782053efa12c6f725ec71513a1856b581c37b2bd94ca86c7cbdd" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.418107 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5g67q" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.430011 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" event={"ID":"f61a29cb-55b4-42b7-817f-67600cb70980","Type":"ContainerDied","Data":"76e5a21021a65d29a66e6a0752b5f19ebc3765d7ab7a6d28ea4fa544ff4272a5"} Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.430137 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.456231 4902 scope.go:117] "RemoveContainer" containerID="0d2437a5f93079b036c105e29106620a3c6bdd840882fbc69ee6c2eb9290f441" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.474037 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.477408 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5g67q"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.488772 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.491463 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-x2flc"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569090 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569330 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569346 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569361 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569368 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569378 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569386 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569398 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569406 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569417 4902 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569425 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569440 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="009e1183-d309-4988-b20c-54c056613558" containerName="marketplace-operator" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569449 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="009e1183-d309-4988-b20c-54c056613558" containerName="marketplace-operator" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569460 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569467 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569476 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569485 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569495 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61a29cb-55b4-42b7-817f-67600cb70980" containerName="route-controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569504 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61a29cb-55b4-42b7-817f-67600cb70980" containerName="route-controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569518 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569525 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569536 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569543 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569555 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569578 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="extract-utilities" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569587 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569595 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="extract-content" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569606 4902 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6244913f-facc-4fe2-be8c-09238b2704d6" containerName="controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569614 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6244913f-facc-4fe2-be8c-09238b2704d6" containerName="controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: E1202 14:21:58.569623 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569631 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569754 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f61a29cb-55b4-42b7-817f-67600cb70980" containerName="route-controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569769 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1467a1b3-3c32-4c86-a5ee-5e0f6f112519" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569781 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab032431-54e6-42b3-b682-8c18f5f29f81" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569790 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="009e1183-d309-4988-b20c-54c056613558" containerName="marketplace-operator" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569803 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="caf53037-752b-4821-8e41-81976a0a0554" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569814 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6764e4f5-a224-4ba3-b210-7b5d6eb2b2f6" containerName="registry-server" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.569825 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6244913f-facc-4fe2-be8c-09238b2704d6" containerName="controller-manager" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.570284 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.573400 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.573554 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.573486 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.573744 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.573779 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.576616 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.576832 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.577363 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.581397 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.585449 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.585658 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.585670 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.588655 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.589012 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.589283 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.593941 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.594879 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.632727 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.632818 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.632863 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633012 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58zmp\" (UniqueName: \"kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633057 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633156 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633224 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633361 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.633478 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqt87\" (UniqueName: 
\"kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.734722 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735170 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqt87\" (UniqueName: \"kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735231 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735264 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735307 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735360 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58zmp\" (UniqueName: \"kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735400 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735433 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config\") pod 
\"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.735465 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.736435 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.736772 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.736849 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.736913 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.737360 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.757832 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.760462 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqt87\" (UniqueName: \"kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.763013 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert\") pod \"route-controller-manager-56556846d7-zsbrh\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.765992 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58zmp\" (UniqueName: \"kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp\") pod \"controller-manager-6987b5bb57-5t5bf\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.915162 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:21:58 crc kubenswrapper[4902]: I1202 14:21:58.923474 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.100770 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.121587 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6244913f-facc-4fe2-be8c-09238b2704d6" path="/var/lib/kubelet/pods/6244913f-facc-4fe2-be8c-09238b2704d6/volumes" Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.122453 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f61a29cb-55b4-42b7-817f-67600cb70980" path="/var/lib/kubelet/pods/f61a29cb-55b4-42b7-817f-67600cb70980/volumes" Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.122917 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.209883 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:21:59 crc kubenswrapper[4902]: W1202 14:21:59.214166 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e7198c9_c37c_4d37_91d0_ed338b09a0b3.slice/crio-2f252a03a23ac4e355a9b6640f05eb1087570a962d8dda816d14791dba92ac61 WatchSource:0}: Error finding container 2f252a03a23ac4e355a9b6640f05eb1087570a962d8dda816d14791dba92ac61: Status 404 returned error can't find the container with id 2f252a03a23ac4e355a9b6640f05eb1087570a962d8dda816d14791dba92ac61 Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.317532 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:21:59 crc kubenswrapper[4902]: W1202 14:21:59.320341 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc95082e1_be72_4b00_ab14_e3750112e0d2.slice/crio-84a81036bf1dac9714e7aa5211cbf710c41697ed9863f72dcf864e8e2b094978 WatchSource:0}: Error finding container 84a81036bf1dac9714e7aa5211cbf710c41697ed9863f72dcf864e8e2b094978: Status 404 returned error can't find the container with id 
84a81036bf1dac9714e7aa5211cbf710c41697ed9863f72dcf864e8e2b094978 Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.439612 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" event={"ID":"c95082e1-be72-4b00-ab14-e3750112e0d2","Type":"ContainerStarted","Data":"84a81036bf1dac9714e7aa5211cbf710c41697ed9863f72dcf864e8e2b094978"} Dec 02 14:21:59 crc kubenswrapper[4902]: I1202 14:21:59.441784 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" event={"ID":"8e7198c9-c37c-4d37-91d0-ed338b09a0b3","Type":"ContainerStarted","Data":"2f252a03a23ac4e355a9b6640f05eb1087570a962d8dda816d14791dba92ac61"} Dec 02 14:22:00 crc kubenswrapper[4902]: I1202 14:22:00.449528 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" event={"ID":"8e7198c9-c37c-4d37-91d0-ed338b09a0b3","Type":"ContainerStarted","Data":"7dc16aef74eba74faf972d94e4af7251e45d46e3427c0e345bfe889580ac463b"} Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.455933 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" event={"ID":"c95082e1-be72-4b00-ab14-e3750112e0d2","Type":"ContainerStarted","Data":"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2"} Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.455977 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" podUID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" containerName="controller-manager" containerID="cri-o://7dc16aef74eba74faf972d94e4af7251e45d46e3427c0e345bfe889580ac463b" gracePeriod=30 Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.456067 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" podUID="c95082e1-be72-4b00-ab14-e3750112e0d2" containerName="route-controller-manager" containerID="cri-o://2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2" gracePeriod=30 Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.456121 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.463823 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.479237 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" podStartSLOduration=5.47921971 podStartE2EDuration="5.47921971s" podCreationTimestamp="2025-12-02 14:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:01.478262343 +0000 UTC m=+352.669571072" watchObservedRunningTime="2025-12-02 14:22:01.47921971 +0000 UTC m=+352.670528419" Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.510504 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" podStartSLOduration=5.510488489 podStartE2EDuration="5.510488489s" 
podCreationTimestamp="2025-12-02 14:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:01.510027406 +0000 UTC m=+352.701336105" watchObservedRunningTime="2025-12-02 14:22:01.510488489 +0000 UTC m=+352.701797218" Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.972451 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-56556846d7-zsbrh_c95082e1-be72-4b00-ab14-e3750112e0d2/route-controller-manager/0.log" Dec 02 14:22:01 crc kubenswrapper[4902]: I1202 14:22:01.972776 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.005009 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"] Dec 02 14:22:02 crc kubenswrapper[4902]: E1202 14:22:02.005228 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c95082e1-be72-4b00-ab14-e3750112e0d2" containerName="route-controller-manager" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.005243 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c95082e1-be72-4b00-ab14-e3750112e0d2" containerName="route-controller-manager" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.005358 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c95082e1-be72-4b00-ab14-e3750112e0d2" containerName="route-controller-manager" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.005771 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.067868 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"] Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.076844 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert\") pod \"c95082e1-be72-4b00-ab14-e3750112e0d2\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.076888 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqt87\" (UniqueName: \"kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87\") pod \"c95082e1-be72-4b00-ab14-e3750112e0d2\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.076920 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca\") pod \"c95082e1-be72-4b00-ab14-e3750112e0d2\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.076976 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config\") pod \"c95082e1-be72-4b00-ab14-e3750112e0d2\" (UID: \"c95082e1-be72-4b00-ab14-e3750112e0d2\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077638 4902 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca" (OuterVolumeSpecName: "client-ca") pod "c95082e1-be72-4b00-ab14-e3750112e0d2" (UID: "c95082e1-be72-4b00-ab14-e3750112e0d2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077710 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config" (OuterVolumeSpecName: "config") pod "c95082e1-be72-4b00-ab14-e3750112e0d2" (UID: "c95082e1-be72-4b00-ab14-e3750112e0d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077851 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077902 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077934 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkv4k\" (UniqueName: \"kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.077967 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.078004 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.078015 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c95082e1-be72-4b00-ab14-e3750112e0d2-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.081713 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c95082e1-be72-4b00-ab14-e3750112e0d2" (UID: "c95082e1-be72-4b00-ab14-e3750112e0d2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.085716 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87" (OuterVolumeSpecName: "kube-api-access-dqt87") pod "c95082e1-be72-4b00-ab14-e3750112e0d2" (UID: "c95082e1-be72-4b00-ab14-e3750112e0d2"). InnerVolumeSpecName "kube-api-access-dqt87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178451 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178503 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178540 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178591 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkv4k\" (UniqueName: \"kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178625 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c95082e1-be72-4b00-ab14-e3750112e0d2-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.178636 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqt87\" (UniqueName: \"kubernetes.io/projected/c95082e1-be72-4b00-ab14-e3750112e0d2-kube-api-access-dqt87\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.179534 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.180794 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " 
pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.182617 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.195206 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkv4k\" (UniqueName: \"kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k\") pod \"route-controller-manager-5756f44d6-zzhhj\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.356101 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.470437 4902 generic.go:334] "Generic (PLEG): container finished" podID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" containerID="7dc16aef74eba74faf972d94e4af7251e45d46e3427c0e345bfe889580ac463b" exitCode=0 Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.470500 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" event={"ID":"8e7198c9-c37c-4d37-91d0-ed338b09a0b3","Type":"ContainerDied","Data":"7dc16aef74eba74faf972d94e4af7251e45d46e3427c0e345bfe889580ac463b"} Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472233 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-56556846d7-zsbrh_c95082e1-be72-4b00-ab14-e3750112e0d2/route-controller-manager/0.log" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472273 4902 generic.go:334] "Generic (PLEG): container finished" podID="c95082e1-be72-4b00-ab14-e3750112e0d2" containerID="2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2" exitCode=255 Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" event={"ID":"c95082e1-be72-4b00-ab14-e3750112e0d2","Type":"ContainerDied","Data":"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2"} Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472320 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" event={"ID":"c95082e1-be72-4b00-ab14-e3750112e0d2","Type":"ContainerDied","Data":"84a81036bf1dac9714e7aa5211cbf710c41697ed9863f72dcf864e8e2b094978"} Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472340 4902 scope.go:117] "RemoveContainer" containerID="2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.472355 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.502595 4902 scope.go:117] "RemoveContainer" containerID="2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2" Dec 02 14:22:02 crc kubenswrapper[4902]: E1202 14:22:02.503291 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2\": container with ID starting with 2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2 not found: ID does not exist" containerID="2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.503340 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2"} err="failed to get container status \"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2\": rpc error: code = NotFound desc = could not find container \"2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2\": container with ID starting with 2f65cc500d5ebbc17c68500c37ce3bd50649aa9955bfbac67709ed79124c7cd2 not found: ID does not exist" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.506675 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.510236 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-zsbrh"] Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.639818 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.685364 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca\") pod \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.685463 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles\") pod \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.685586 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58zmp\" (UniqueName: \"kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp\") pod \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.685626 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config\") pod \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.685684 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert\") pod \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\" (UID: \"8e7198c9-c37c-4d37-91d0-ed338b09a0b3\") " Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.686237 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8e7198c9-c37c-4d37-91d0-ed338b09a0b3" (UID: "8e7198c9-c37c-4d37-91d0-ed338b09a0b3"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.686249 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca" (OuterVolumeSpecName: "client-ca") pod "8e7198c9-c37c-4d37-91d0-ed338b09a0b3" (UID: "8e7198c9-c37c-4d37-91d0-ed338b09a0b3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.687620 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config" (OuterVolumeSpecName: "config") pod "8e7198c9-c37c-4d37-91d0-ed338b09a0b3" (UID: "8e7198c9-c37c-4d37-91d0-ed338b09a0b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.690639 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp" (OuterVolumeSpecName: "kube-api-access-58zmp") pod "8e7198c9-c37c-4d37-91d0-ed338b09a0b3" (UID: "8e7198c9-c37c-4d37-91d0-ed338b09a0b3"). InnerVolumeSpecName "kube-api-access-58zmp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.698133 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8e7198c9-c37c-4d37-91d0-ed338b09a0b3" (UID: "8e7198c9-c37c-4d37-91d0-ed338b09a0b3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.787971 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58zmp\" (UniqueName: \"kubernetes.io/projected/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-kube-api-access-58zmp\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.788107 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.788349 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.788382 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.788404 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e7198c9-c37c-4d37-91d0-ed338b09a0b3-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:02 crc kubenswrapper[4902]: I1202 14:22:02.830799 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"] Dec 02 14:22:02 crc kubenswrapper[4902]: W1202 14:22:02.836601 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod073639cb_47da_4542_b925_9d60e85349bf.slice/crio-01d1a0afac5f5d212e4511b953da9a4871b9a806221fe0a30615509410714754 WatchSource:0}: Error finding container 01d1a0afac5f5d212e4511b953da9a4871b9a806221fe0a30615509410714754: Status 404 returned error can't find the container with id 01d1a0afac5f5d212e4511b953da9a4871b9a806221fe0a30615509410714754 Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.118079 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95082e1-be72-4b00-ab14-e3750112e0d2" path="/var/lib/kubelet/pods/c95082e1-be72-4b00-ab14-e3750112e0d2/volumes" Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.479419 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" event={"ID":"073639cb-47da-4542-b925-9d60e85349bf","Type":"ContainerStarted","Data":"01d1a0afac5f5d212e4511b953da9a4871b9a806221fe0a30615509410714754"} Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.481653 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" event={"ID":"8e7198c9-c37c-4d37-91d0-ed338b09a0b3","Type":"ContainerDied","Data":"2f252a03a23ac4e355a9b6640f05eb1087570a962d8dda816d14791dba92ac61"} Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.481695 
4902 scope.go:117] "RemoveContainer" containerID="7dc16aef74eba74faf972d94e4af7251e45d46e3427c0e345bfe889580ac463b" Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.481760 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-5t5bf" Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.502406 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:22:03 crc kubenswrapper[4902]: I1202 14:22:03.505433 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-5t5bf"] Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.494888 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" event={"ID":"073639cb-47da-4542-b925-9d60e85349bf","Type":"ContainerStarted","Data":"540b2c8faf004ddc109bc2a35287964cd886e8c6d5b1e68976a40446bbeb8fbc"} Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.589826 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"] Dec 02 14:22:04 crc kubenswrapper[4902]: E1202 14:22:04.590215 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" containerName="controller-manager" Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.590259 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" containerName="controller-manager" Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.590482 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" containerName="controller-manager" Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.591291 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.599617 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.600166 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.600214 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.600486 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.600670 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.601089 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.608268 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.612301 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.612409 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.612530 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvv9b\" (UniqueName: \"kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.612830 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.612899 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.617189 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"]
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.641954 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"]
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.714331 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.714665 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.714710 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvv9b\" (UniqueName: \"kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.714734 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.714752 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.716212 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.716274 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.716736 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.735597 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.737273 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvv9b\" (UniqueName: \"kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b\") pod \"controller-manager-7ccb65cc84-qctdx\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:04 crc kubenswrapper[4902]: I1202 14:22:04.922622 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:05 crc kubenswrapper[4902]: I1202 14:22:05.118626 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e7198c9-c37c-4d37-91d0-ed338b09a0b3" path="/var/lib/kubelet/pods/8e7198c9-c37c-4d37-91d0-ed338b09a0b3/volumes"
Dec 02 14:22:05 crc kubenswrapper[4902]: I1202 14:22:05.120488 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"]
Dec 02 14:22:05 crc kubenswrapper[4902]: I1202 14:22:05.500727 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" event={"ID":"045134ab-7002-4252-8a74-b9bf51050548","Type":"ContainerStarted","Data":"3c490de7320cb429b4a2f523d74311d3c7781f0d65fafd4a5490af2909bc557d"}
Dec 02 14:22:06 crc kubenswrapper[4902]: I1202 14:22:06.507358 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" event={"ID":"045134ab-7002-4252-8a74-b9bf51050548","Type":"ContainerStarted","Data":"a4b8818bdcaab3adf585a93baa966d4676dd74cc331d0d45a4d056b8146f5952"}
Dec 02 14:22:06 crc kubenswrapper[4902]: I1202 14:22:06.507871 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"
Dec 02 14:22:06 crc kubenswrapper[4902]: I1202 14:22:06.513344 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"
Dec 02 14:22:06 crc kubenswrapper[4902]: I1202 14:22:06.523640 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" podStartSLOduration=7.523621518 podStartE2EDuration="7.523621518s" podCreationTimestamp="2025-12-02 14:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:06.520530479 +0000 UTC m=+357.711839188" watchObservedRunningTime="2025-12-02 14:22:06.523621518 +0000 UTC m=+357.714930237"
Dec 02 14:22:06 crc kubenswrapper[4902]: I1202 14:22:06.542260 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" podStartSLOduration=7.542240164 podStartE2EDuration="7.542240164s" podCreationTimestamp="2025-12-02 14:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:06.537405465 +0000 UTC m=+357.728714174" watchObservedRunningTime="2025-12-02 14:22:06.542240164 +0000 UTC m=+357.733548883"
Dec 02 14:22:07 crc kubenswrapper[4902]: I1202 14:22:07.512463 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:07 crc kubenswrapper[4902]: I1202 14:22:07.518613 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:29 crc kubenswrapper[4902]: I1202 14:22:29.670290 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" podUID="e530e0e8-55f8-4601-b587-2069a7d6fec8" containerName="oauth-openshift" containerID="cri-o://d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375" gracePeriod=15
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.137085 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.164616 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6b788bb46c-pl97q"]
Dec 02 14:22:30 crc kubenswrapper[4902]: E1202 14:22:30.164810 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e530e0e8-55f8-4601-b587-2069a7d6fec8" containerName="oauth-openshift"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.164820 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e530e0e8-55f8-4601-b587-2069a7d6fec8" containerName="oauth-openshift"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.164909 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e530e0e8-55f8-4601-b587-2069a7d6fec8" containerName="oauth-openshift"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.165234 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178100 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178179 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178214 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178243 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178268 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178299 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178325 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178352 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmn78\" (UniqueName: \"kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178390 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178422 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178452 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178487 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178525 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.178585 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login\") pod \"e530e0e8-55f8-4601-b587-2069a7d6fec8\" (UID: \"e530e0e8-55f8-4601-b587-2069a7d6fec8\") "
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.179434 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.179598 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.180641 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6b788bb46c-pl97q"]
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.182974 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.183657 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.184238 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.185796 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78" (OuterVolumeSpecName: "kube-api-access-wmn78") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "kube-api-access-wmn78". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.188161 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.188631 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.191489 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.192135 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.195836 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.196693 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.196976 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.198024 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e530e0e8-55f8-4601-b587-2069a7d6fec8" (UID: "e530e0e8-55f8-4601-b587-2069a7d6fec8"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279369 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279424 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-policies\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279444 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc6rn\" (UniqueName: \"kubernetes.io/projected/ee05f11d-d96b-4e8b-be79-23b40ff384c3-kube-api-access-pc6rn\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279470 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279492 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279510 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279524 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-error\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279845 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.279960 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280022 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-session\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280064 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280119 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-dir\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280158 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-login\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280194 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280287 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280316 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280337 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280357 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280377 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmn78\" (UniqueName: \"kubernetes.io/projected/e530e0e8-55f8-4601-b587-2069a7d6fec8-kube-api-access-wmn78\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280396 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280431 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280449 4902 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280468 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280488 4902 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e530e0e8-55f8-4601-b587-2069a7d6fec8-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280506 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280523 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280542 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.280559 4902 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e530e0e8-55f8-4601-b587-2069a7d6fec8-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.382096 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.382482 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-session\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.382731 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.383043 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-dir\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.383255 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-login\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.383426 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.383627 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.383835 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-policies\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.384066 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc6rn\" (UniqueName: \"kubernetes.io/projected/ee05f11d-d96b-4e8b-be79-23b40ff384c3-kube-api-access-pc6rn\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.384296 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.384478 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.384674 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.384821 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-error\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.385011 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.391302 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.395300 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.399955 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-session\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.403403 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.403655 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-dir\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.408244 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-login\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.412752 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.413932 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.414960 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-audit-policies\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.417139 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.418140 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.422756 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.430265 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ee05f11d-d96b-4e8b-be79-23b40ff384c3-v4-0-config-user-template-error\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.531827 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc6rn\" (UniqueName: \"kubernetes.io/projected/ee05f11d-d96b-4e8b-be79-23b40ff384c3-kube-api-access-pc6rn\") pod \"oauth-openshift-6b788bb46c-pl97q\" (UID: \"ee05f11d-d96b-4e8b-be79-23b40ff384c3\") " pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.533903 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.647843 4902 generic.go:334] "Generic (PLEG): container finished" podID="e530e0e8-55f8-4601-b587-2069a7d6fec8" containerID="d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375" exitCode=0
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.647884 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.647885 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" event={"ID":"e530e0e8-55f8-4601-b587-2069a7d6fec8","Type":"ContainerDied","Data":"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"}
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.648026 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g9hgb" event={"ID":"e530e0e8-55f8-4601-b587-2069a7d6fec8","Type":"ContainerDied","Data":"315d5bf01b0c0d0c1540dce0e7cf7fdc2904a3227cd5a6af0bdc82e264332499"}
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.648068 4902 scope.go:117] "RemoveContainer" containerID="d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.676788 4902 scope.go:117] "RemoveContainer" containerID="d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"
Dec 02 14:22:30 crc kubenswrapper[4902]: E1202 14:22:30.677549 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375\": container with ID starting with d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375 not found: ID does not exist" containerID="d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.677669 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375"} err="failed to get container status \"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375\": rpc error: code = NotFound desc = could not find container \"d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375\": container with ID starting with d7f336479f89ffe6714e99205131d49620565e1551a2a89555c4f597b2ccf375 not found: ID does not exist"
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.679949 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"]
Dec 02 14:22:30 crc kubenswrapper[4902]: I1202 14:22:30.683020 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g9hgb"]
Dec 02 14:22:31 crc kubenswrapper[4902]: I1202 14:22:31.000665 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6b788bb46c-pl97q"]
Dec 02 14:22:31 crc kubenswrapper[4902]: I1202 14:22:31.116411 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e530e0e8-55f8-4601-b587-2069a7d6fec8" path="/var/lib/kubelet/pods/e530e0e8-55f8-4601-b587-2069a7d6fec8/volumes"
Dec 02 14:22:31 crc kubenswrapper[4902]: I1202 14:22:31.658612 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q" event={"ID":"ee05f11d-d96b-4e8b-be79-23b40ff384c3","Type":"ContainerStarted","Data":"0cdb1cd7e3da6fb298490005381bc41137814448f5f5415f01173054e764f4d2"}
Dec 02 14:22:32 crc kubenswrapper[4902]: I1202 14:22:32.681418 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q" event={"ID":"ee05f11d-d96b-4e8b-be79-23b40ff384c3","Type":"ContainerStarted","Data":"ec0ec5848b56f420ae9de1449f90d7f97f1752bc0a595dcf0d7b47fca9eb653c"}
Dec 02 14:22:32 crc kubenswrapper[4902]: I1202 14:22:32.681856 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:32 crc kubenswrapper[4902]: I1202 14:22:32.712435 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q" podStartSLOduration=28.712414799 podStartE2EDuration="28.712414799s" podCreationTimestamp="2025-12-02 14:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:32.709474384 +0000 UTC m=+383.900783123" watchObservedRunningTime="2025-12-02 14:22:32.712414799 +0000 UTC m=+383.903723518"
Dec 02 14:22:32 crc kubenswrapper[4902]: I1202 14:22:32.976671 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6b788bb46c-pl97q"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.175838 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jsgkf"]
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.176945 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.193789 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jsgkf"]
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338009 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-bound-sa-token\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338142 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338198 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9sz7\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-kube-api-access-t9sz7\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338229 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-registry-certificates\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338293 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/633177cb-f74c-4493-9479-54223daaebfa-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338375 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-trusted-ca\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338409 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-registry-tls\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.338457 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/633177cb-f74c-4493-9479-54223daaebfa-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.358396 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439770 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9sz7\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-kube-api-access-t9sz7\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439827 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-registry-certificates\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439857 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/633177cb-f74c-4493-9479-54223daaebfa-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439888 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-trusted-ca\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439908 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-registry-tls\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439935 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/633177cb-f74c-4493-9479-54223daaebfa-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.439954 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-bound-sa-token\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.440839 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/633177cb-f74c-4493-9479-54223daaebfa-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.441466 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-registry-certificates\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.442011 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/633177cb-f74c-4493-9479-54223daaebfa-trusted-ca\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.446138 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-registry-tls\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.453129 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/633177cb-f74c-4493-9479-54223daaebfa-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.456700 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9sz7\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-kube-api-access-t9sz7\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.459103 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/633177cb-f74c-4493-9479-54223daaebfa-bound-sa-token\") pod \"image-registry-66df7c8f76-jsgkf\" (UID: \"633177cb-f74c-4493-9479-54223daaebfa\") " pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.492322 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.731533 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.731610 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:22:34 crc kubenswrapper[4902]: I1202 14:22:34.934045 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jsgkf"]
Dec 02 14:22:34 crc kubenswrapper[4902]: W1202 14:22:34.943338 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod633177cb_f74c_4493_9479_54223daaebfa.slice/crio-37c54fb9006593fd73a2dd8f4a6c92f59af4c33f5771de74d5374287799b6304 WatchSource:0}: Error finding container 37c54fb9006593fd73a2dd8f4a6c92f59af4c33f5771de74d5374287799b6304: Status 404 returned error can't find the container with id 37c54fb9006593fd73a2dd8f4a6c92f59af4c33f5771de74d5374287799b6304
Dec 02 14:22:35 crc kubenswrapper[4902]: I1202 14:22:35.699450 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf" event={"ID":"633177cb-f74c-4493-9479-54223daaebfa","Type":"ContainerStarted","Data":"63461b5d13f9d67de29de31d6528537b3189646dab734f0951503be591ded6bc"}
Dec 02 14:22:35 crc kubenswrapper[4902]: I1202 14:22:35.700615 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf"
Dec 02 14:22:35 crc kubenswrapper[4902]: I1202 14:22:35.700714 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf" event={"ID":"633177cb-f74c-4493-9479-54223daaebfa","Type":"ContainerStarted","Data":"37c54fb9006593fd73a2dd8f4a6c92f59af4c33f5771de74d5374287799b6304"}
Dec 02 14:22:35 crc kubenswrapper[4902]: I1202 14:22:35.724041 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf" podStartSLOduration=1.724025489 podStartE2EDuration="1.724025489s" podCreationTimestamp="2025-12-02 14:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:35.721833506 +0000 UTC m=+386.913142305" watchObservedRunningTime="2025-12-02 14:22:35.724025489 +0000 UTC m=+386.915334198"
Dec 02 14:22:36 crc kubenswrapper[4902]: I1202 14:22:36.642038 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"]
Dec 02 14:22:36 crc kubenswrapper[4902]: I1202 14:22:36.642388 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" podUID="045134ab-7002-4252-8a74-b9bf51050548" containerName="controller-manager" containerID="cri-o://a4b8818bdcaab3adf585a93baa966d4676dd74cc331d0d45a4d056b8146f5952" gracePeriod=30
Dec 02 14:22:36 crc kubenswrapper[4902]: I1202 14:22:36.668301 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"]
Dec 02 14:22:36 crc kubenswrapper[4902]: I1202 14:22:36.668552 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" podUID="073639cb-47da-4542-b925-9d60e85349bf" containerName="route-controller-manager" containerID="cri-o://540b2c8faf004ddc109bc2a35287964cd886e8c6d5b1e68976a40446bbeb8fbc" gracePeriod=30
Dec 02 14:22:37 crc kubenswrapper[4902]: I1202 14:22:37.712236 4902 generic.go:334] "Generic (PLEG): container finished" podID="073639cb-47da-4542-b925-9d60e85349bf" containerID="540b2c8faf004ddc109bc2a35287964cd886e8c6d5b1e68976a40446bbeb8fbc" exitCode=0
Dec 02 14:22:37 crc kubenswrapper[4902]: I1202 14:22:37.712333 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" event={"ID":"073639cb-47da-4542-b925-9d60e85349bf","Type":"ContainerDied","Data":"540b2c8faf004ddc109bc2a35287964cd886e8c6d5b1e68976a40446bbeb8fbc"}
Dec 02 14:22:37 crc kubenswrapper[4902]: I1202 14:22:37.714929 4902 generic.go:334] "Generic (PLEG): container finished" podID="045134ab-7002-4252-8a74-b9bf51050548" containerID="a4b8818bdcaab3adf585a93baa966d4676dd74cc331d0d45a4d056b8146f5952" exitCode=0
Dec 02 14:22:37 crc kubenswrapper[4902]: I1202 14:22:37.715110 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" event={"ID":"045134ab-7002-4252-8a74-b9bf51050548","Type":"ContainerDied","Data":"a4b8818bdcaab3adf585a93baa966d4676dd74cc331d0d45a4d056b8146f5952"}
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.310633 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.321593 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335137 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl"]
Dec 02 14:22:38 crc kubenswrapper[4902]: E1202 14:22:38.335414 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045134ab-7002-4252-8a74-b9bf51050548" containerName="controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335435 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="045134ab-7002-4252-8a74-b9bf51050548" containerName="controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: E1202 14:22:38.335446 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="073639cb-47da-4542-b925-9d60e85349bf" containerName="route-controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335453 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="073639cb-47da-4542-b925-9d60e85349bf" containerName="route-controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335553 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="045134ab-7002-4252-8a74-b9bf51050548" containerName="controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335579 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="073639cb-47da-4542-b925-9d60e85349bf" containerName="route-controller-manager"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.335948 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl"
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.345049 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl"]
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407361 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvv9b\" (UniqueName: \"kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b\") pod \"045134ab-7002-4252-8a74-b9bf51050548\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") "
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407455 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert\") pod \"073639cb-47da-4542-b925-9d60e85349bf\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") "
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407499 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert\") pod \"045134ab-7002-4252-8a74-b9bf51050548\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") "
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407536 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config\") pod \"045134ab-7002-4252-8a74-b9bf51050548\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") "
Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407585 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName:
\"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca\") pod \"045134ab-7002-4252-8a74-b9bf51050548\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407624 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkv4k\" (UniqueName: \"kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k\") pod \"073639cb-47da-4542-b925-9d60e85349bf\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407684 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles\") pod \"045134ab-7002-4252-8a74-b9bf51050548\" (UID: \"045134ab-7002-4252-8a74-b9bf51050548\") " Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407707 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config\") pod \"073639cb-47da-4542-b925-9d60e85349bf\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.407721 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca\") pod \"073639cb-47da-4542-b925-9d60e85349bf\" (UID: \"073639cb-47da-4542-b925-9d60e85349bf\") " Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.408025 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa5f615c-305b-428a-bd41-4e983b8ba765-serving-cert\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.408074 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-config\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.408135 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pmhw\" (UniqueName: \"kubernetes.io/projected/aa5f615c-305b-428a-bd41-4e983b8ba765-kube-api-access-9pmhw\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.408174 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-client-ca\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.410310 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "045134ab-7002-4252-8a74-b9bf51050548" (UID: "045134ab-7002-4252-8a74-b9bf51050548"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.411076 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config" (OuterVolumeSpecName: "config") pod "045134ab-7002-4252-8a74-b9bf51050548" (UID: "045134ab-7002-4252-8a74-b9bf51050548"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.411102 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca" (OuterVolumeSpecName: "client-ca") pod "073639cb-47da-4542-b925-9d60e85349bf" (UID: "073639cb-47da-4542-b925-9d60e85349bf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.411140 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config" (OuterVolumeSpecName: "config") pod "073639cb-47da-4542-b925-9d60e85349bf" (UID: "073639cb-47da-4542-b925-9d60e85349bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.411102 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca" (OuterVolumeSpecName: "client-ca") pod "045134ab-7002-4252-8a74-b9bf51050548" (UID: "045134ab-7002-4252-8a74-b9bf51050548"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.417842 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b" (OuterVolumeSpecName: "kube-api-access-lvv9b") pod "045134ab-7002-4252-8a74-b9bf51050548" (UID: "045134ab-7002-4252-8a74-b9bf51050548"). InnerVolumeSpecName "kube-api-access-lvv9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.421979 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k" (OuterVolumeSpecName: "kube-api-access-xkv4k") pod "073639cb-47da-4542-b925-9d60e85349bf" (UID: "073639cb-47da-4542-b925-9d60e85349bf"). InnerVolumeSpecName "kube-api-access-xkv4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.422112 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "073639cb-47da-4542-b925-9d60e85349bf" (UID: "073639cb-47da-4542-b925-9d60e85349bf"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.427847 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "045134ab-7002-4252-8a74-b9bf51050548" (UID: "045134ab-7002-4252-8a74-b9bf51050548"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.509806 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pmhw\" (UniqueName: \"kubernetes.io/projected/aa5f615c-305b-428a-bd41-4e983b8ba765-kube-api-access-9pmhw\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.509870 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-client-ca\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.509921 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa5f615c-305b-428a-bd41-4e983b8ba765-serving-cert\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.509958 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-config\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510026 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvv9b\" (UniqueName: \"kubernetes.io/projected/045134ab-7002-4252-8a74-b9bf51050548-kube-api-access-lvv9b\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510040 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073639cb-47da-4542-b925-9d60e85349bf-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510052 4902 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/045134ab-7002-4252-8a74-b9bf51050548-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510062 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510072 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 
crc kubenswrapper[4902]: I1202 14:22:38.510082 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkv4k\" (UniqueName: \"kubernetes.io/projected/073639cb-47da-4542-b925-9d60e85349bf-kube-api-access-xkv4k\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510092 4902 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/045134ab-7002-4252-8a74-b9bf51050548-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510103 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510115 4902 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/073639cb-47da-4542-b925-9d60e85349bf-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.510980 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-client-ca\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.511094 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa5f615c-305b-428a-bd41-4e983b8ba765-config\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.513957 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa5f615c-305b-428a-bd41-4e983b8ba765-serving-cert\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.527300 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pmhw\" (UniqueName: \"kubernetes.io/projected/aa5f615c-305b-428a-bd41-4e983b8ba765-kube-api-access-9pmhw\") pod \"route-controller-manager-54948b86b7-4fqwl\" (UID: \"aa5f615c-305b-428a-bd41-4e983b8ba765\") " pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.651867 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.727535 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.727778 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ccb65cc84-qctdx" event={"ID":"045134ab-7002-4252-8a74-b9bf51050548","Type":"ContainerDied","Data":"3c490de7320cb429b4a2f523d74311d3c7781f0d65fafd4a5490af2909bc557d"} Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.727862 4902 scope.go:117] "RemoveContainer" containerID="a4b8818bdcaab3adf585a93baa966d4676dd74cc331d0d45a4d056b8146f5952" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.730704 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" event={"ID":"073639cb-47da-4542-b925-9d60e85349bf","Type":"ContainerDied","Data":"01d1a0afac5f5d212e4511b953da9a4871b9a806221fe0a30615509410714754"} Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.730830 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.748963 4902 scope.go:117] "RemoveContainer" containerID="540b2c8faf004ddc109bc2a35287964cd886e8c6d5b1e68976a40446bbeb8fbc" Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.782314 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"] Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.787084 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7ccb65cc84-qctdx"] Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.792331 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"] Dec 02 14:22:38 crc kubenswrapper[4902]: I1202 14:22:38.796230 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-zzhhj"] Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.094154 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl"] Dec 02 14:22:39 crc kubenswrapper[4902]: W1202 14:22:39.105405 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa5f615c_305b_428a_bd41_4e983b8ba765.slice/crio-8d2e34f9ecc2eef3b6302475c979341c49277643c9f14ce20ac029cb0a9d2a7d WatchSource:0}: Error finding container 8d2e34f9ecc2eef3b6302475c979341c49277643c9f14ce20ac029cb0a9d2a7d: Status 404 returned error can't find the container with id 8d2e34f9ecc2eef3b6302475c979341c49277643c9f14ce20ac029cb0a9d2a7d Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.113686 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="045134ab-7002-4252-8a74-b9bf51050548" path="/var/lib/kubelet/pods/045134ab-7002-4252-8a74-b9bf51050548/volumes" Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.115048 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="073639cb-47da-4542-b925-9d60e85349bf" path="/var/lib/kubelet/pods/073639cb-47da-4542-b925-9d60e85349bf/volumes" Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.738717 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" event={"ID":"aa5f615c-305b-428a-bd41-4e983b8ba765","Type":"ContainerStarted","Data":"7d0227a5689f772e1f0ca217841de349b27d4abe80373f4d172a964b8ef4004b"} Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.739993 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.740083 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" event={"ID":"aa5f615c-305b-428a-bd41-4e983b8ba765","Type":"ContainerStarted","Data":"8d2e34f9ecc2eef3b6302475c979341c49277643c9f14ce20ac029cb0a9d2a7d"} Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.743078 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" Dec 02 14:22:39 crc kubenswrapper[4902]: I1202 14:22:39.756647 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-54948b86b7-4fqwl" podStartSLOduration=3.756625425 podStartE2EDuration="3.756625425s" podCreationTimestamp="2025-12-02 14:22:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:39.75504704 +0000 UTC m=+390.946355789" watchObservedRunningTime="2025-12-02 14:22:39.756625425 +0000 UTC m=+390.947934144" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.647979 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bc7576657-85mj8"] Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.648648 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.650265 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.650523 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.651043 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.654409 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.656720 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.657165 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.663602 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.667548 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bc7576657-85mj8"] Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.773344 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-config\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.773437 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beed8f56-1c35-4a33-8c92-47e9d595f0fa-serving-cert\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.773471 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjrfp\" (UniqueName: \"kubernetes.io/projected/beed8f56-1c35-4a33-8c92-47e9d595f0fa-kube-api-access-cjrfp\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.773503 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-client-ca\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.773538 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-proxy-ca-bundles\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.875486 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-config\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.875629 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beed8f56-1c35-4a33-8c92-47e9d595f0fa-serving-cert\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.875693 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjrfp\" (UniqueName: \"kubernetes.io/projected/beed8f56-1c35-4a33-8c92-47e9d595f0fa-kube-api-access-cjrfp\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.875799 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-client-ca\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.875917 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-proxy-ca-bundles\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.877985 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-client-ca\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.879269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-config\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.882620 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/beed8f56-1c35-4a33-8c92-47e9d595f0fa-proxy-ca-bundles\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " 
pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.891043 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/beed8f56-1c35-4a33-8c92-47e9d595f0fa-serving-cert\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.907172 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjrfp\" (UniqueName: \"kubernetes.io/projected/beed8f56-1c35-4a33-8c92-47e9d595f0fa-kube-api-access-cjrfp\") pod \"controller-manager-7bc7576657-85mj8\" (UID: \"beed8f56-1c35-4a33-8c92-47e9d595f0fa\") " pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:40 crc kubenswrapper[4902]: I1202 14:22:40.965169 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:41 crc kubenswrapper[4902]: W1202 14:22:41.437756 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbeed8f56_1c35_4a33_8c92_47e9d595f0fa.slice/crio-b1d0ae0e8c26b0f8df390b5c9753c4fb2083695ed2378a1781c4d924d430c0c5 WatchSource:0}: Error finding container b1d0ae0e8c26b0f8df390b5c9753c4fb2083695ed2378a1781c4d924d430c0c5: Status 404 returned error can't find the container with id b1d0ae0e8c26b0f8df390b5c9753c4fb2083695ed2378a1781c4d924d430c0c5 Dec 02 14:22:41 crc kubenswrapper[4902]: I1202 14:22:41.442044 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bc7576657-85mj8"] Dec 02 14:22:41 crc kubenswrapper[4902]: I1202 14:22:41.764089 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" event={"ID":"beed8f56-1c35-4a33-8c92-47e9d595f0fa","Type":"ContainerStarted","Data":"ccd5a789a5814fba08a83732c101a407f5f75582a56c02c994a505d6af7f56e4"} Dec 02 14:22:41 crc kubenswrapper[4902]: I1202 14:22:41.764397 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" event={"ID":"beed8f56-1c35-4a33-8c92-47e9d595f0fa","Type":"ContainerStarted","Data":"b1d0ae0e8c26b0f8df390b5c9753c4fb2083695ed2378a1781c4d924d430c0c5"} Dec 02 14:22:41 crc kubenswrapper[4902]: I1202 14:22:41.781789 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" podStartSLOduration=5.781769318 podStartE2EDuration="5.781769318s" podCreationTimestamp="2025-12-02 14:22:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:22:41.779630458 +0000 UTC m=+392.970939187" watchObservedRunningTime="2025-12-02 14:22:41.781769318 +0000 UTC m=+392.973078037" Dec 02 14:22:42 crc kubenswrapper[4902]: I1202 14:22:42.777965 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:42 crc kubenswrapper[4902]: I1202 14:22:42.784019 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bc7576657-85mj8" Dec 02 14:22:45 crc 
kubenswrapper[4902]: I1202 14:22:45.774736 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2hb48"] Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.775834 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.779039 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.793109 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2hb48"] Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.945288 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rprm\" (UniqueName: \"kubernetes.io/projected/5a130427-ec6d-4743-97a9-42bb4cb308ca-kube-api-access-6rprm\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.945377 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-catalog-content\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.945458 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-utilities\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.977874 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.979025 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.981596 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 14:22:45 crc kubenswrapper[4902]: I1202 14:22:45.988761 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.046683 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rprm\" (UniqueName: \"kubernetes.io/projected/5a130427-ec6d-4743-97a9-42bb4cb308ca-kube-api-access-6rprm\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.046776 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-catalog-content\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.046831 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-utilities\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.047329 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-utilities\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.047398 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a130427-ec6d-4743-97a9-42bb4cb308ca-catalog-content\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.064853 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rprm\" (UniqueName: \"kubernetes.io/projected/5a130427-ec6d-4743-97a9-42bb4cb308ca-kube-api-access-6rprm\") pod \"certified-operators-2hb48\" (UID: \"5a130427-ec6d-4743-97a9-42bb4cb308ca\") " pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.096186 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.148542 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.148615 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.148665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwp29\" (UniqueName: \"kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.250196 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.250645 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.250692 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.250739 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwp29\" (UniqueName: \"kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.251178 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content\") pod \"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.267974 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwp29\" (UniqueName: \"kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29\") pod 
\"community-operators-grl6v\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.293972 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.494597 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2hb48"] Dec 02 14:22:46 crc kubenswrapper[4902]: W1202 14:22:46.498037 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a130427_ec6d_4743_97a9_42bb4cb308ca.slice/crio-c9f8e19bea5712a296063c3b2757e831e1e527fd41ac6ed8c78a3e200539d60f WatchSource:0}: Error finding container c9f8e19bea5712a296063c3b2757e831e1e527fd41ac6ed8c78a3e200539d60f: Status 404 returned error can't find the container with id c9f8e19bea5712a296063c3b2757e831e1e527fd41ac6ed8c78a3e200539d60f Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.719070 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:22:46 crc kubenswrapper[4902]: W1202 14:22:46.732282 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24b78819_8fca_46b1_91ab_c9b4b5253bbf.slice/crio-4f6cc1b93db323e06433fdbfa6e0fa15a09e24ffcd4cbe7c408e54aa9f8f8889 WatchSource:0}: Error finding container 4f6cc1b93db323e06433fdbfa6e0fa15a09e24ffcd4cbe7c408e54aa9f8f8889: Status 404 returned error can't find the container with id 4f6cc1b93db323e06433fdbfa6e0fa15a09e24ffcd4cbe7c408e54aa9f8f8889 Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.797306 4902 generic.go:334] "Generic (PLEG): container finished" podID="5a130427-ec6d-4743-97a9-42bb4cb308ca" containerID="b664d8b0e52afb6cf267b4cd7ad19b951694c5f980f4201d30b9cf2b6a70ef24" exitCode=0 Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.797423 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2hb48" event={"ID":"5a130427-ec6d-4743-97a9-42bb4cb308ca","Type":"ContainerDied","Data":"b664d8b0e52afb6cf267b4cd7ad19b951694c5f980f4201d30b9cf2b6a70ef24"} Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.798741 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2hb48" event={"ID":"5a130427-ec6d-4743-97a9-42bb4cb308ca","Type":"ContainerStarted","Data":"c9f8e19bea5712a296063c3b2757e831e1e527fd41ac6ed8c78a3e200539d60f"} Dec 02 14:22:46 crc kubenswrapper[4902]: I1202 14:22:46.801231 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerStarted","Data":"4f6cc1b93db323e06433fdbfa6e0fa15a09e24ffcd4cbe7c408e54aa9f8f8889"} Dec 02 14:22:47 crc kubenswrapper[4902]: I1202 14:22:47.807862 4902 generic.go:334] "Generic (PLEG): container finished" podID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerID="8ebf0606bb35868eb3cfdb3eb3506e08364bcb8705ce746bebcd31dbc539513e" exitCode=0 Dec 02 14:22:47 crc kubenswrapper[4902]: I1202 14:22:47.807901 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerDied","Data":"8ebf0606bb35868eb3cfdb3eb3506e08364bcb8705ce746bebcd31dbc539513e"} 
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.175838 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wmg9h"]
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.177513 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.180280 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.195735 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wmg9h"]
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.275879 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-catalog-content\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.275936 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-utilities\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.275988 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dt7r\" (UniqueName: \"kubernetes.io/projected/d366d244-dcc5-4685-9ee5-73990c8d6cb6-kube-api-access-7dt7r\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.373600 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"]
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.375010 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.376786 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dt7r\" (UniqueName: \"kubernetes.io/projected/d366d244-dcc5-4685-9ee5-73990c8d6cb6-kube-api-access-7dt7r\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.376862 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-catalog-content\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.376897 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-utilities\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.377631 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-utilities\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.377956 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.378019 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d366d244-dcc5-4685-9ee5-73990c8d6cb6-catalog-content\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.391105 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"]
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.400662 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dt7r\" (UniqueName: \"kubernetes.io/projected/d366d244-dcc5-4685-9ee5-73990c8d6cb6-kube-api-access-7dt7r\") pod \"redhat-marketplace-wmg9h\" (UID: \"d366d244-dcc5-4685-9ee5-73990c8d6cb6\") " pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.478033 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssh45\" (UniqueName: \"kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.478117 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.478224 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.530291 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wmg9h"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.579514 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.579633 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssh45\" (UniqueName: \"kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.579680 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.580152 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.580315 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.597415 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssh45\" (UniqueName: \"kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45\") pod \"redhat-operators-p8dbc\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.734937 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8dbc"
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.818978 4902 generic.go:334] "Generic (PLEG): container finished" podID="5a130427-ec6d-4743-97a9-42bb4cb308ca" containerID="132caba4fea55f3f40867a7f95e2f8996ff180726ff744c0af905a4cbfe82753" exitCode=0
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.819023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2hb48" event={"ID":"5a130427-ec6d-4743-97a9-42bb4cb308ca","Type":"ContainerDied","Data":"132caba4fea55f3f40867a7f95e2f8996ff180726ff744c0af905a4cbfe82753"}
Dec 02 14:22:48 crc kubenswrapper[4902]: I1202 14:22:48.965482 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wmg9h"]
Dec 02 14:22:48 crc kubenswrapper[4902]: W1202 14:22:48.977178 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd366d244_dcc5_4685_9ee5_73990c8d6cb6.slice/crio-bced4b4d1b15052b98474e979c42d8db20b6004e18a3933452af4089a7705829 WatchSource:0}: Error finding container bced4b4d1b15052b98474e979c42d8db20b6004e18a3933452af4089a7705829: Status 404 returned error can't find the container with id bced4b4d1b15052b98474e979c42d8db20b6004e18a3933452af4089a7705829
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.154146 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"]
Dec 02 14:22:49 crc kubenswrapper[4902]: W1202 14:22:49.192143 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb24e9b8_9890_479a_9248_e978a02e6823.slice/crio-472c37555383ef250db2ada069edf49e16bb7990e2593631779dba030f5b3173 WatchSource:0}: Error finding container 472c37555383ef250db2ada069edf49e16bb7990e2593631779dba030f5b3173: Status 404 returned error can't find the container with id 472c37555383ef250db2ada069edf49e16bb7990e2593631779dba030f5b3173
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.833335 4902 generic.go:334] "Generic (PLEG): container finished" podID="cb24e9b8-9890-479a-9248-e978a02e6823" containerID="948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1" exitCode=0
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.833459 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerDied","Data":"948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1"}
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.833841 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerStarted","Data":"472c37555383ef250db2ada069edf49e16bb7990e2593631779dba030f5b3173"}
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.835295 4902 generic.go:334] "Generic (PLEG): container finished" podID="d366d244-dcc5-4685-9ee5-73990c8d6cb6" containerID="15558f03f26eab58a83934edf368af9797cef7395b9904236a6cd87bf480f3ca" exitCode=0
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.835387 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wmg9h" event={"ID":"d366d244-dcc5-4685-9ee5-73990c8d6cb6","Type":"ContainerDied","Data":"15558f03f26eab58a83934edf368af9797cef7395b9904236a6cd87bf480f3ca"}
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.835416 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wmg9h" event={"ID":"d366d244-dcc5-4685-9ee5-73990c8d6cb6","Type":"ContainerStarted","Data":"bced4b4d1b15052b98474e979c42d8db20b6004e18a3933452af4089a7705829"}
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.841647 4902 generic.go:334] "Generic (PLEG): container finished" podID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerID="d871237d88f1214acb927ef3fb3ad0a99d1a8af44a2a73bfe77a123550bb461c" exitCode=0
Dec 02 14:22:49 crc kubenswrapper[4902]: I1202 14:22:49.841706 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerDied","Data":"d871237d88f1214acb927ef3fb3ad0a99d1a8af44a2a73bfe77a123550bb461c"}
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.848812 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerStarted","Data":"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157"}
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.851625 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2hb48" event={"ID":"5a130427-ec6d-4743-97a9-42bb4cb308ca","Type":"ContainerStarted","Data":"0f714f88d222728cdfaab706159ff82f29e7c3438bd0093c41146f820c3a850a"}
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.854452 4902 generic.go:334] "Generic (PLEG): container finished" podID="d366d244-dcc5-4685-9ee5-73990c8d6cb6" containerID="46709324c322b0e1f66aefdae03a04f0f5530ba3f0c8d99a636172d423aa287f" exitCode=0
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.854537 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wmg9h" event={"ID":"d366d244-dcc5-4685-9ee5-73990c8d6cb6","Type":"ContainerDied","Data":"46709324c322b0e1f66aefdae03a04f0f5530ba3f0c8d99a636172d423aa287f"}
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.861127 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerStarted","Data":"e0eb7bd815cbcd835733040a478ec173f3a37278d510d31b35b1bf06858814b6"}
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.892493 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2hb48" podStartSLOduration=2.876839999 podStartE2EDuration="5.892476708s" podCreationTimestamp="2025-12-02 14:22:45 +0000 UTC" firstStartedPulling="2025-12-02 14:22:46.798894668 +0000 UTC m=+397.990203377" lastFinishedPulling="2025-12-02 14:22:49.814531377 +0000 UTC m=+401.005840086" observedRunningTime="2025-12-02 14:22:50.889058362 +0000 UTC m=+402.080367071" watchObservedRunningTime="2025-12-02 14:22:50.892476708 +0000 UTC m=+402.083785417"
Dec 02 14:22:50 crc kubenswrapper[4902]: I1202 14:22:50.907029 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grl6v" podStartSLOduration=3.328306944 podStartE2EDuration="5.907015997s" podCreationTimestamp="2025-12-02 14:22:45 +0000 UTC" firstStartedPulling="2025-12-02 14:22:47.80931765 +0000 UTC m=+399.000626359" lastFinishedPulling="2025-12-02 14:22:50.388026703 +0000 UTC m=+401.579335412"
observedRunningTime="2025-12-02 14:22:50.904381683 +0000 UTC m=+402.095690392" watchObservedRunningTime="2025-12-02 14:22:50.907015997 +0000 UTC m=+402.098324706" Dec 02 14:22:51 crc kubenswrapper[4902]: I1202 14:22:51.867107 4902 generic.go:334] "Generic (PLEG): container finished" podID="cb24e9b8-9890-479a-9248-e978a02e6823" containerID="5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157" exitCode=0 Dec 02 14:22:51 crc kubenswrapper[4902]: I1202 14:22:51.867195 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerDied","Data":"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157"} Dec 02 14:22:51 crc kubenswrapper[4902]: I1202 14:22:51.870745 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wmg9h" event={"ID":"d366d244-dcc5-4685-9ee5-73990c8d6cb6","Type":"ContainerStarted","Data":"93407f73a7ffbcd8d5726828bdee531d42a70b589b848129567a90760ae26e77"} Dec 02 14:22:51 crc kubenswrapper[4902]: I1202 14:22:51.907076 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wmg9h" podStartSLOduration=2.355950991 podStartE2EDuration="3.907056177s" podCreationTimestamp="2025-12-02 14:22:48 +0000 UTC" firstStartedPulling="2025-12-02 14:22:49.83918949 +0000 UTC m=+401.030498199" lastFinishedPulling="2025-12-02 14:22:51.390294636 +0000 UTC m=+402.581603385" observedRunningTime="2025-12-02 14:22:51.901717637 +0000 UTC m=+403.093026346" watchObservedRunningTime="2025-12-02 14:22:51.907056177 +0000 UTC m=+403.098364886" Dec 02 14:22:52 crc kubenswrapper[4902]: I1202 14:22:52.877595 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerStarted","Data":"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980"} Dec 02 14:22:52 crc kubenswrapper[4902]: I1202 14:22:52.900502 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p8dbc" podStartSLOduration=2.41001086 podStartE2EDuration="4.900483441s" podCreationTimestamp="2025-12-02 14:22:48 +0000 UTC" firstStartedPulling="2025-12-02 14:22:49.836032961 +0000 UTC m=+401.027341660" lastFinishedPulling="2025-12-02 14:22:52.326505532 +0000 UTC m=+403.517814241" observedRunningTime="2025-12-02 14:22:52.898991219 +0000 UTC m=+404.090299968" watchObservedRunningTime="2025-12-02 14:22:52.900483441 +0000 UTC m=+404.091792150" Dec 02 14:22:54 crc kubenswrapper[4902]: I1202 14:22:54.503720 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-jsgkf" Dec 02 14:22:54 crc kubenswrapper[4902]: I1202 14:22:54.640671 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.096470 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.096895 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.136341 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.294976 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.295044 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.348027 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.938203 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2hb48" Dec 02 14:22:56 crc kubenswrapper[4902]: I1202 14:22:56.953854 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.531042 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wmg9h" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.531302 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wmg9h" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.597541 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wmg9h" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.735631 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.736049 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.770419 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.980318 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 14:22:58 crc kubenswrapper[4902]: I1202 14:22:58.983610 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wmg9h" Dec 02 14:23:02 crc kubenswrapper[4902]: I1202 14:23:02.857747 4902 patch_prober.go:28] interesting pod/router-default-5444994796-lgl7q container/router namespace/openshift-ingress: Readiness probe status=failure output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 14:23:02 crc kubenswrapper[4902]: I1202 14:23:02.858059 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-ingress/router-default-5444994796-lgl7q" podUID="b853669d-191c-4a38-96c8-290914042f96" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:23:04 crc kubenswrapper[4902]: I1202 14:23:04.731481 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:23:04 crc kubenswrapper[4902]: I1202 14:23:04.731553 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.289290 4902 scope.go:117] "RemoveContainer" containerID="f4b97140c4d76a4eb9a580d02436507380536dbc21663b273ffd3a21ac6ce95b" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.315975 4902 scope.go:117] "RemoveContainer" containerID="789c169e3460c580458abf24ff5c6697c6500a5a3eb0b826149a66dc919f5676" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.334057 4902 scope.go:117] "RemoveContainer" containerID="9ec46eaca3208233d9f0d2c04bca7adeea1790962a86c2591b6288bfcb2a2b8b" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.353318 4902 scope.go:117] "RemoveContainer" containerID="45a9d01a5d1c248ae68397e21b328936b528504ba8aa8cb74a66a249334b9004" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.373437 4902 scope.go:117] "RemoveContainer" containerID="eabc6a042c2cd427a88620463d269972f7a0b141c63a2863c542ac7b398c4106" Dec 02 14:23:09 crc kubenswrapper[4902]: I1202 14:23:09.389879 4902 scope.go:117] "RemoveContainer" containerID="e0e1c2a14d39bf37f47945ac0e3cb6451455130b0b7dc9ca1652b88e522138f1" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:19.682605 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" podUID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" containerName="registry" containerID="cri-o://fbaf5a37fa423b9cb0e1593e19d6c30219196e56f6536454aaa5ad1c897003b8" gracePeriod=30 Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.059461 4902 generic.go:334] "Generic (PLEG): container finished" podID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" containerID="fbaf5a37fa423b9cb0e1593e19d6c30219196e56f6536454aaa5ad1c897003b8" exitCode=0 Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.059608 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" event={"ID":"5af6da25-fad3-4ac3-bb6b-f8b2169f8992","Type":"ContainerDied","Data":"fbaf5a37fa423b9cb0e1593e19d6c30219196e56f6536454aaa5ad1c897003b8"} Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.647153 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791612 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791670 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tplxn\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791699 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791742 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791801 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.791824 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.792578 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.792629 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.796690 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.796869 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\" (UID: \"5af6da25-fad3-4ac3-bb6b-f8b2169f8992\") " Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.797225 4902 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.797238 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.797222 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.797405 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn" (OuterVolumeSpecName: "kube-api-access-tplxn") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "kube-api-access-tplxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.806022 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.806190 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.811265 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.898055 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tplxn\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-kube-api-access-tplxn\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.898075 4902 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.898086 4902 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.898093 4902 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.898103 4902 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5af6da25-fad3-4ac3-bb6b-f8b2169f8992-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 14:23:20 crc kubenswrapper[4902]: I1202 14:23:20.917201 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5af6da25-fad3-4ac3-bb6b-f8b2169f8992" (UID: "5af6da25-fad3-4ac3-bb6b-f8b2169f8992"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 14:23:21 crc kubenswrapper[4902]: I1202 14:23:21.067333 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" event={"ID":"5af6da25-fad3-4ac3-bb6b-f8b2169f8992","Type":"ContainerDied","Data":"938a0a2087b3aebd4e72a40d970c486ed388d06b3578532b0392ddda60d0c3ed"} Dec 02 14:23:21 crc kubenswrapper[4902]: I1202 14:23:21.067725 4902 scope.go:117] "RemoveContainer" containerID="fbaf5a37fa423b9cb0e1593e19d6c30219196e56f6536454aaa5ad1c897003b8" Dec 02 14:23:21 crc kubenswrapper[4902]: I1202 14:23:21.067578 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2sdv9" Dec 02 14:23:21 crc kubenswrapper[4902]: I1202 14:23:21.105249 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:23:21 crc kubenswrapper[4902]: I1202 14:23:21.118461 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2sdv9"] Dec 02 14:23:23 crc kubenswrapper[4902]: I1202 14:23:23.116173 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" path="/var/lib/kubelet/pods/5af6da25-fad3-4ac3-bb6b-f8b2169f8992/volumes" Dec 02 14:23:34 crc kubenswrapper[4902]: I1202 14:23:34.732105 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:23:34 crc kubenswrapper[4902]: I1202 14:23:34.732595 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:23:34 crc kubenswrapper[4902]: I1202 14:23:34.732668 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:23:34 crc kubenswrapper[4902]: I1202 14:23:34.733417 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:23:34 crc kubenswrapper[4902]: I1202 14:23:34.733505 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2" gracePeriod=600 Dec 02 14:23:35 crc kubenswrapper[4902]: I1202 14:23:35.158355 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2" exitCode=0 Dec 02 14:23:35 crc kubenswrapper[4902]: I1202 14:23:35.158439 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2"} Dec 02 14:23:35 crc kubenswrapper[4902]: I1202 14:23:35.158751 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2"} Dec 02 14:23:35 crc kubenswrapper[4902]: I1202 14:23:35.158775 4902 scope.go:117] "RemoveContainer" 
containerID="44b7572700be495628a4272717af28819598f668c30dbc7fc8d5edffc449185d" Dec 02 14:26:04 crc kubenswrapper[4902]: I1202 14:26:04.731960 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:26:04 crc kubenswrapper[4902]: I1202 14:26:04.732811 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:26:34 crc kubenswrapper[4902]: I1202 14:26:34.732051 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:26:34 crc kubenswrapper[4902]: I1202 14:26:34.732832 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:27:04 crc kubenswrapper[4902]: I1202 14:27:04.731689 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:27:04 crc kubenswrapper[4902]: I1202 14:27:04.732348 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:27:04 crc kubenswrapper[4902]: I1202 14:27:04.732404 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:27:04 crc kubenswrapper[4902]: I1202 14:27:04.733396 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:27:04 crc kubenswrapper[4902]: I1202 14:27:04.733515 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2" gracePeriod=600 Dec 02 14:27:05 crc kubenswrapper[4902]: I1202 14:27:05.471959 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" 
containerID="118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2" exitCode=0 Dec 02 14:27:05 crc kubenswrapper[4902]: I1202 14:27:05.472032 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2"} Dec 02 14:27:05 crc kubenswrapper[4902]: I1202 14:27:05.472832 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc"} Dec 02 14:27:05 crc kubenswrapper[4902]: I1202 14:27:05.473141 4902 scope.go:117] "RemoveContainer" containerID="cfd4c9507221549852cd9e29206bbded528186bac0d16f1fe3d9e7ce1e5270e2" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.254413 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-bj9d5"] Dec 02 14:27:23 crc kubenswrapper[4902]: E1202 14:27:23.255319 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" containerName="registry" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.255341 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" containerName="registry" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.255479 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5af6da25-fad3-4ac3-bb6b-f8b2169f8992" containerName="registry" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.256058 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.258067 4902 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-rxf6v" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.258382 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.258454 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.270966 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-bj9d5"] Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.291628 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-c8sjz"] Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.292347 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-c8sjz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.294195 4902 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-khddz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.315511 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-c8sjz"] Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.318864 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvs9t"] Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.319862 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.322030 4902 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-clfxr" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.333086 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvs9t"] Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.434436 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xv5t\" (UniqueName: \"kubernetes.io/projected/1b016533-08b9-464a-8956-1c236d28e036-kube-api-access-7xv5t\") pod \"cert-manager-webhook-5655c58dd6-wvs9t\" (UID: \"1b016533-08b9-464a-8956-1c236d28e036\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.434517 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/c4d2337b-c3b1-4759-999d-ab47b754a817-kube-api-access-sv4rn\") pod \"cert-manager-cainjector-7f985d654d-bj9d5\" (UID: \"c4d2337b-c3b1-4759-999d-ab47b754a817\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.434578 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phv4m\" (UniqueName: \"kubernetes.io/projected/0c33663a-0d99-4a94-bb43-46f102098870-kube-api-access-phv4m\") pod \"cert-manager-5b446d88c5-c8sjz\" (UID: \"0c33663a-0d99-4a94-bb43-46f102098870\") " pod="cert-manager/cert-manager-5b446d88c5-c8sjz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.535855 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xv5t\" (UniqueName: \"kubernetes.io/projected/1b016533-08b9-464a-8956-1c236d28e036-kube-api-access-7xv5t\") pod \"cert-manager-webhook-5655c58dd6-wvs9t\" (UID: \"1b016533-08b9-464a-8956-1c236d28e036\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.535917 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/c4d2337b-c3b1-4759-999d-ab47b754a817-kube-api-access-sv4rn\") pod \"cert-manager-cainjector-7f985d654d-bj9d5\" (UID: \"c4d2337b-c3b1-4759-999d-ab47b754a817\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.535948 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phv4m\" (UniqueName: 
\"kubernetes.io/projected/0c33663a-0d99-4a94-bb43-46f102098870-kube-api-access-phv4m\") pod \"cert-manager-5b446d88c5-c8sjz\" (UID: \"0c33663a-0d99-4a94-bb43-46f102098870\") " pod="cert-manager/cert-manager-5b446d88c5-c8sjz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.554747 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phv4m\" (UniqueName: \"kubernetes.io/projected/0c33663a-0d99-4a94-bb43-46f102098870-kube-api-access-phv4m\") pod \"cert-manager-5b446d88c5-c8sjz\" (UID: \"0c33663a-0d99-4a94-bb43-46f102098870\") " pod="cert-manager/cert-manager-5b446d88c5-c8sjz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.554893 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv4rn\" (UniqueName: \"kubernetes.io/projected/c4d2337b-c3b1-4759-999d-ab47b754a817-kube-api-access-sv4rn\") pod \"cert-manager-cainjector-7f985d654d-bj9d5\" (UID: \"c4d2337b-c3b1-4759-999d-ab47b754a817\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.554816 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xv5t\" (UniqueName: \"kubernetes.io/projected/1b016533-08b9-464a-8956-1c236d28e036-kube-api-access-7xv5t\") pod \"cert-manager-webhook-5655c58dd6-wvs9t\" (UID: \"1b016533-08b9-464a-8956-1c236d28e036\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.574775 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.605322 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-c8sjz" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.634750 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.768549 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-bj9d5"] Dec 02 14:27:23 crc kubenswrapper[4902]: W1202 14:27:23.773247 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4d2337b_c3b1_4759_999d_ab47b754a817.slice/crio-721745d626326afd63c0edaef814174c06d47b86221fc217bb2cf305f7162dd4 WatchSource:0}: Error finding container 721745d626326afd63c0edaef814174c06d47b86221fc217bb2cf305f7162dd4: Status 404 returned error can't find the container with id 721745d626326afd63c0edaef814174c06d47b86221fc217bb2cf305f7162dd4 Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.777925 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.833613 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-c8sjz"] Dec 02 14:27:23 crc kubenswrapper[4902]: W1202 14:27:23.839018 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c33663a_0d99_4a94_bb43_46f102098870.slice/crio-7a64882b5b4d0fbfb071d4c4cff01c0a43d169a4899a8a07e4288257283967da WatchSource:0}: Error finding container 7a64882b5b4d0fbfb071d4c4cff01c0a43d169a4899a8a07e4288257283967da: Status 404 returned error can't find the container with id 7a64882b5b4d0fbfb071d4c4cff01c0a43d169a4899a8a07e4288257283967da Dec 02 14:27:23 crc kubenswrapper[4902]: I1202 14:27:23.869397 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvs9t"] Dec 02 14:27:23 crc kubenswrapper[4902]: W1202 14:27:23.872835 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b016533_08b9_464a_8956_1c236d28e036.slice/crio-a9462a1386188776256fee3ee1e92915868c9abc14780f7ca3a29a8503a4bb2e WatchSource:0}: Error finding container a9462a1386188776256fee3ee1e92915868c9abc14780f7ca3a29a8503a4bb2e: Status 404 returned error can't find the container with id a9462a1386188776256fee3ee1e92915868c9abc14780f7ca3a29a8503a4bb2e Dec 02 14:27:24 crc kubenswrapper[4902]: I1202 14:27:24.598019 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" event={"ID":"1b016533-08b9-464a-8956-1c236d28e036","Type":"ContainerStarted","Data":"a9462a1386188776256fee3ee1e92915868c9abc14780f7ca3a29a8503a4bb2e"} Dec 02 14:27:24 crc kubenswrapper[4902]: I1202 14:27:24.599042 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" event={"ID":"c4d2337b-c3b1-4759-999d-ab47b754a817","Type":"ContainerStarted","Data":"721745d626326afd63c0edaef814174c06d47b86221fc217bb2cf305f7162dd4"} Dec 02 14:27:24 crc kubenswrapper[4902]: I1202 14:27:24.601224 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-c8sjz" event={"ID":"0c33663a-0d99-4a94-bb43-46f102098870","Type":"ContainerStarted","Data":"7a64882b5b4d0fbfb071d4c4cff01c0a43d169a4899a8a07e4288257283967da"} Dec 02 14:27:26 crc kubenswrapper[4902]: I1202 14:27:26.615377 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" 
event={"ID":"c4d2337b-c3b1-4759-999d-ab47b754a817","Type":"ContainerStarted","Data":"d8e6484e74a9c1a5c9be0c0246883f91df143bff7ca35655e7fbd808b314fe19"} Dec 02 14:27:26 crc kubenswrapper[4902]: I1202 14:27:26.629049 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-bj9d5" podStartSLOduration=1.737602055 podStartE2EDuration="3.629020258s" podCreationTimestamp="2025-12-02 14:27:23 +0000 UTC" firstStartedPulling="2025-12-02 14:27:23.777700357 +0000 UTC m=+674.969009066" lastFinishedPulling="2025-12-02 14:27:25.66911856 +0000 UTC m=+676.860427269" observedRunningTime="2025-12-02 14:27:26.628286427 +0000 UTC m=+677.819595136" watchObservedRunningTime="2025-12-02 14:27:26.629020258 +0000 UTC m=+677.820328967" Dec 02 14:27:27 crc kubenswrapper[4902]: I1202 14:27:27.621695 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-c8sjz" event={"ID":"0c33663a-0d99-4a94-bb43-46f102098870","Type":"ContainerStarted","Data":"3c81a5fa19a7d95cddd75a656bbb6fd8f4b27f5a628115d992168f97cacc84e2"} Dec 02 14:27:27 crc kubenswrapper[4902]: I1202 14:27:27.623416 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" event={"ID":"1b016533-08b9-464a-8956-1c236d28e036","Type":"ContainerStarted","Data":"60c82b90eda6f07f32d3843a5971baec692bac1e0474b1da5e202143a3e5e300"} Dec 02 14:27:27 crc kubenswrapper[4902]: I1202 14:27:27.623647 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:27 crc kubenswrapper[4902]: I1202 14:27:27.637247 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-c8sjz" podStartSLOduration=1.128392454 podStartE2EDuration="4.637228451s" podCreationTimestamp="2025-12-02 14:27:23 +0000 UTC" firstStartedPulling="2025-12-02 14:27:23.840813867 +0000 UTC m=+675.032122576" lastFinishedPulling="2025-12-02 14:27:27.349649834 +0000 UTC m=+678.540958573" observedRunningTime="2025-12-02 14:27:27.63577593 +0000 UTC m=+678.827084659" watchObservedRunningTime="2025-12-02 14:27:27.637228451 +0000 UTC m=+678.828537170" Dec 02 14:27:27 crc kubenswrapper[4902]: I1202 14:27:27.656144 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" podStartSLOduration=2.01746532 podStartE2EDuration="4.656122083s" podCreationTimestamp="2025-12-02 14:27:23 +0000 UTC" firstStartedPulling="2025-12-02 14:27:23.879794584 +0000 UTC m=+675.071103293" lastFinishedPulling="2025-12-02 14:27:26.518451347 +0000 UTC m=+677.709760056" observedRunningTime="2025-12-02 14:27:27.65077543 +0000 UTC m=+678.842084149" watchObservedRunningTime="2025-12-02 14:27:27.656122083 +0000 UTC m=+678.847430812" Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.638069 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvs9t" Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.743100 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q55jp"] Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.743896 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-controller" 
containerID="cri-o://0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744239 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744225 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="nbdb" containerID="cri-o://ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744343 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-node" containerID="cri-o://32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744362 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-acl-logging" containerID="cri-o://67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744337 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="sbdb" containerID="cri-o://a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.744335 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="northd" containerID="cri-o://c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" gracePeriod=30 Dec 02 14:27:33 crc kubenswrapper[4902]: I1202 14:27:33.779215 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" containerID="cri-o://f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" gracePeriod=30 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.080786 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/3.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.083725 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovn-acl-logging/0.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.084266 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovn-controller/0.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.084706 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146377 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7ngxn"] Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146641 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146657 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146671 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="sbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146680 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="sbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146691 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-acl-logging" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146698 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-acl-logging" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146710 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="northd" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146718 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="northd" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146730 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146737 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146747 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146754 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146762 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="nbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146770 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="nbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146785 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146793 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146806 4902 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146814 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146823 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-node" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146830 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-node" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146841 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kubecfg-setup" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146849 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kubecfg-setup" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.146860 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146868 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146970 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146981 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.146991 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147003 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147015 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="nbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147026 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="kube-rbac-proxy-node" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147037 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-acl-logging" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147049 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="northd" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147061 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="sbdb" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147071 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovn-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 
14:27:34.147180 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147188 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147288 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.147303 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerName="ovnkube-controller" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.149283 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.279924 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.279998 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280019 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280054 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g4l5\" (UniqueName: \"kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280101 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280130 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280147 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280168 4902 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280191 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280239 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280265 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280292 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280173 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280224 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket" (OuterVolumeSpecName: "log-socket") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280348 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280305 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280325 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280362 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280485 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280307 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280372 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280526 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280553 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280581 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280625 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash" (OuterVolumeSpecName: "host-slash") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280632 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280641 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280675 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280696 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280722 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280760 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280773 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280788 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch\") pod \"9379a49e-d66c-4224-a564-f00d4cadd9ea\" (UID: \"9379a49e-d66c-4224-a564-f00d4cadd9ea\") " Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280869 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280950 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280897 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log" (OuterVolumeSpecName: "node-log") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.280990 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-var-lib-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281061 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-kubelet\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281108 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-systemd-units\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281148 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovn-node-metrics-cert\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281207 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281220 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281431 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-systemd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281477 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-node-log\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281499 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281607 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-ovn\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281646 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-netns\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281675 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-slash\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281693 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-etc-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281716 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-script-lib\") pod 
\"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281739 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281790 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-log-socket\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281809 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-netd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281864 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-config\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.281956 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-env-overrides\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282173 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56zkt\" (UniqueName: \"kubernetes.io/projected/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-kube-api-access-56zkt\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282220 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-bin\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282325 4902 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282336 4902 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-log-socket\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc 
kubenswrapper[4902]: I1202 14:27:34.282346 4902 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282354 4902 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282363 4902 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282372 4902 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282381 4902 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282391 4902 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-slash\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282403 4902 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282414 4902 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282423 4902 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282433 4902 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282441 4902 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282449 4902 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282457 4902 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc 
kubenswrapper[4902]: I1202 14:27:34.282465 4902 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-node-log\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.282473 4902 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.286651 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5" (OuterVolumeSpecName: "kube-api-access-6g4l5") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "kube-api-access-6g4l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.287203 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.295137 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "9379a49e-d66c-4224-a564-f00d4cadd9ea" (UID: "9379a49e-d66c-4224-a564-f00d4cadd9ea"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383142 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-netd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383209 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-log-socket\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383260 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-config\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383290 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-netd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383295 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-env-overrides\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383379 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56zkt\" (UniqueName: \"kubernetes.io/projected/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-kube-api-access-56zkt\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383380 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-log-socket\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383427 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-bin\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383474 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-cni-bin\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383538 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-var-lib-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383620 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-kubelet\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383658 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-systemd-units\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383686 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovn-node-metrics-cert\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383703 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-kubelet\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383720 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383745 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383728 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-var-lib-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383772 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-systemd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383772 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-systemd-units\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383844 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-node-log\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383809 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-node-log\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383840 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-systemd\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383904 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383932 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383951 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-ovn\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.383987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-netns\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384019 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-slash\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384022 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-run-ovn\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384044 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-run-netns\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384050 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-etc-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384073 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-etc-openvswitch\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384079 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-env-overrides\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384090 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-script-lib\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384105 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-slash\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384171 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384208 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384229 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g4l5\" (UniqueName: \"kubernetes.io/projected/9379a49e-d66c-4224-a564-f00d4cadd9ea-kube-api-access-6g4l5\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384250 4902 
reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9379a49e-d66c-4224-a564-f00d4cadd9ea-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384267 4902 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9379a49e-d66c-4224-a564-f00d4cadd9ea-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.384386 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-config\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.385144 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovnkube-script-lib\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.387241 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-ovn-node-metrics-cert\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.409951 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56zkt\" (UniqueName: \"kubernetes.io/projected/9fe0c43e-abdd-4536-9ecb-35d331d0b5b2-kube-api-access-56zkt\") pod \"ovnkube-node-7ngxn\" (UID: \"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.465877 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:34 crc kubenswrapper[4902]: W1202 14:27:34.498249 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fe0c43e_abdd_4536_9ecb_35d331d0b5b2.slice/crio-f02284e1be6737f78451b9312e73117c3cb37e1eeac587f4504a8d14f056e003 WatchSource:0}: Error finding container f02284e1be6737f78451b9312e73117c3cb37e1eeac587f4504a8d14f056e003: Status 404 returned error can't find the container with id f02284e1be6737f78451b9312e73117c3cb37e1eeac587f4504a8d14f056e003 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.675509 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovnkube-controller/3.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.678942 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovn-acl-logging/0.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.679707 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-q55jp_9379a49e-d66c-4224-a564-f00d4cadd9ea/ovn-controller/0.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680358 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680417 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680433 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680447 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680451 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680434 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680488 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680502 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680513 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680525 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680544 4902 scope.go:117] "RemoveContainer" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680461 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680656 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" exitCode=0 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680670 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" exitCode=143 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680680 4902 generic.go:334] "Generic (PLEG): container finished" podID="9379a49e-d66c-4224-a564-f00d4cadd9ea" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" exitCode=143 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680710 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680724 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680735 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680742 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680749 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680756 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680763 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680769 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680776 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680784 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680795 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680806 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680814 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680822 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680829 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680836 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680843 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680850 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680856 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680863 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680869 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680878 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680889 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680897 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680904 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680910 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680919 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680926 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680932 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680940 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680946 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680953 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680962 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-q55jp" event={"ID":"9379a49e-d66c-4224-a564-f00d4cadd9ea","Type":"ContainerDied","Data":"744eec9ba69a27be30b03927bbd1d12fbda0909ae1a2d6492c91bd32b0b22bf6"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680973 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680982 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680990 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.680997 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681006 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681014 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681021 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681030 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681038 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.681046 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.684732 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/2.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.686974 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/1.log" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.687064 4902 generic.go:334] "Generic (PLEG): container finished" podID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" 
containerID="fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292" exitCode=2 Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.687206 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerDied","Data":"fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.687309 4902 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.688362 4902 scope.go:117] "RemoveContainer" containerID="fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.688905 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vm9q6_openshift-multus(381fdb33-d71e-468a-9b1e-a2920c32f8ae)\"" pod="openshift-multus/multus-vm9q6" podUID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.692065 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"3620e7671c0e669627baa9161afd1ecca2f5d545f879bdca33d0c2bcc14fe9ea"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.692137 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"f02284e1be6737f78451b9312e73117c3cb37e1eeac587f4504a8d14f056e003"} Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.755284 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.779959 4902 scope.go:117] "RemoveContainer" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.797232 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q55jp"] Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.803168 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-q55jp"] Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.806933 4902 scope.go:117] "RemoveContainer" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.829771 4902 scope.go:117] "RemoveContainer" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.842030 4902 scope.go:117] "RemoveContainer" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.857348 4902 scope.go:117] "RemoveContainer" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.878487 4902 scope.go:117] "RemoveContainer" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.909412 4902 scope.go:117] 
"RemoveContainer" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.940344 4902 scope.go:117] "RemoveContainer" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.959638 4902 scope.go:117] "RemoveContainer" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.960131 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.960183 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} err="failed to get container status \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.960216 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.960691 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": container with ID starting with cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d not found: ID does not exist" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.960730 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} err="failed to get container status \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": rpc error: code = NotFound desc = could not find container \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": container with ID starting with cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.960761 4902 scope.go:117] "RemoveContainer" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.961139 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": container with ID starting with a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8 not found: ID does not exist" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.961210 4902 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} err="failed to get container status \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": rpc error: code = NotFound desc = could not find container \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": container with ID starting with a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.961247 4902 scope.go:117] "RemoveContainer" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.961630 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": container with ID starting with ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a not found: ID does not exist" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.961660 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} err="failed to get container status \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": rpc error: code = NotFound desc = could not find container \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": container with ID starting with ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.961682 4902 scope.go:117] "RemoveContainer" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.962088 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": container with ID starting with c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506 not found: ID does not exist" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.962145 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} err="failed to get container status \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": rpc error: code = NotFound desc = could not find container \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": container with ID starting with c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.962165 4902 scope.go:117] "RemoveContainer" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.962491 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": container with ID starting with 6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8 not found: ID does not exist" 
containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.962523 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} err="failed to get container status \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": rpc error: code = NotFound desc = could not find container \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": container with ID starting with 6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.962579 4902 scope.go:117] "RemoveContainer" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.963817 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": container with ID starting with 32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456 not found: ID does not exist" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.963847 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} err="failed to get container status \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": rpc error: code = NotFound desc = could not find container \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": container with ID starting with 32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.963869 4902 scope.go:117] "RemoveContainer" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.964283 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": container with ID starting with 67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842 not found: ID does not exist" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.964317 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} err="failed to get container status \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": rpc error: code = NotFound desc = could not find container \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": container with ID starting with 67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.964337 4902 scope.go:117] "RemoveContainer" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.964662 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": container with ID starting with 0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b not found: ID does not exist" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.964699 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} err="failed to get container status \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": rpc error: code = NotFound desc = could not find container \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": container with ID starting with 0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.964721 4902 scope.go:117] "RemoveContainer" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: E1202 14:27:34.965084 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": container with ID starting with 92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7 not found: ID does not exist" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965126 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} err="failed to get container status \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": rpc error: code = NotFound desc = could not find container \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": container with ID starting with 92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965143 4902 scope.go:117] "RemoveContainer" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965360 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} err="failed to get container status \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965401 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965839 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} err="failed to get container status \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": rpc error: code = NotFound desc = could not find container \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": container with ID starting with 
cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.965865 4902 scope.go:117] "RemoveContainer" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.966470 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} err="failed to get container status \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": rpc error: code = NotFound desc = could not find container \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": container with ID starting with a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.966498 4902 scope.go:117] "RemoveContainer" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.966821 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} err="failed to get container status \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": rpc error: code = NotFound desc = could not find container \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": container with ID starting with ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.966848 4902 scope.go:117] "RemoveContainer" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967175 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} err="failed to get container status \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": rpc error: code = NotFound desc = could not find container \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": container with ID starting with c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967208 4902 scope.go:117] "RemoveContainer" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967521 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} err="failed to get container status \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": rpc error: code = NotFound desc = could not find container \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": container with ID starting with 6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967551 4902 scope.go:117] "RemoveContainer" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967910 4902 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} err="failed to get container status \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": rpc error: code = NotFound desc = could not find container \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": container with ID starting with 32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.967937 4902 scope.go:117] "RemoveContainer" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.968260 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} err="failed to get container status \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": rpc error: code = NotFound desc = could not find container \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": container with ID starting with 67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.968282 4902 scope.go:117] "RemoveContainer" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.968608 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} err="failed to get container status \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": rpc error: code = NotFound desc = could not find container \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": container with ID starting with 0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.968648 4902 scope.go:117] "RemoveContainer" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969148 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} err="failed to get container status \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": rpc error: code = NotFound desc = could not find container \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": container with ID starting with 92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969169 4902 scope.go:117] "RemoveContainer" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969396 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} err="failed to get container status \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" Dec 
02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969421 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969767 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} err="failed to get container status \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": rpc error: code = NotFound desc = could not find container \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": container with ID starting with cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.969805 4902 scope.go:117] "RemoveContainer" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970036 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} err="failed to get container status \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": rpc error: code = NotFound desc = could not find container \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": container with ID starting with a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970055 4902 scope.go:117] "RemoveContainer" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970354 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} err="failed to get container status \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": rpc error: code = NotFound desc = could not find container \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": container with ID starting with ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970378 4902 scope.go:117] "RemoveContainer" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970653 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} err="failed to get container status \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": rpc error: code = NotFound desc = could not find container \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": container with ID starting with c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.970671 4902 scope.go:117] "RemoveContainer" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.971680 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} err="failed to get container status 
\"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": rpc error: code = NotFound desc = could not find container \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": container with ID starting with 6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.971699 4902 scope.go:117] "RemoveContainer" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.972967 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} err="failed to get container status \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": rpc error: code = NotFound desc = could not find container \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": container with ID starting with 32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.973008 4902 scope.go:117] "RemoveContainer" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.973391 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} err="failed to get container status \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": rpc error: code = NotFound desc = could not find container \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": container with ID starting with 67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.973412 4902 scope.go:117] "RemoveContainer" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.973772 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} err="failed to get container status \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": rpc error: code = NotFound desc = could not find container \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": container with ID starting with 0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.973793 4902 scope.go:117] "RemoveContainer" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974102 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} err="failed to get container status \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": rpc error: code = NotFound desc = could not find container \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": container with ID starting with 92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974171 4902 scope.go:117] "RemoveContainer" 
containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974498 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} err="failed to get container status \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974533 4902 scope.go:117] "RemoveContainer" containerID="cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974804 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d"} err="failed to get container status \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": rpc error: code = NotFound desc = could not find container \"cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d\": container with ID starting with cc39d64420c6c04731d61578640792935717b23b5302c2643b4fc3ef1c22813d not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.974840 4902 scope.go:117] "RemoveContainer" containerID="a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.975134 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8"} err="failed to get container status \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": rpc error: code = NotFound desc = could not find container \"a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8\": container with ID starting with a73fc1aad132c06621dceffa4b7d90167e2a77671e792f546bd655d1f2c175b8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.975173 4902 scope.go:117] "RemoveContainer" containerID="ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.975457 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a"} err="failed to get container status \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": rpc error: code = NotFound desc = could not find container \"ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a\": container with ID starting with ca0f43ac81add359e523547a172b5fb485a2f6e822bd3f66491feba9d540ad8a not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.975487 4902 scope.go:117] "RemoveContainer" containerID="c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976282 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506"} err="failed to get container status \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": rpc error: code = NotFound desc = could not find 
container \"c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506\": container with ID starting with c00c6e334bab0fcb333092c334adb407f8801e2b2ab2cae4d4d219de11d28506 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976310 4902 scope.go:117] "RemoveContainer" containerID="6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976582 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8"} err="failed to get container status \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": rpc error: code = NotFound desc = could not find container \"6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8\": container with ID starting with 6ca2714b03644eb2600a558d29ff2d92e34d1199bcb4d4cf13b35b0026eaf8c8 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976616 4902 scope.go:117] "RemoveContainer" containerID="32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976858 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456"} err="failed to get container status \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": rpc error: code = NotFound desc = could not find container \"32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456\": container with ID starting with 32e9d245249be242b320171a8d70c89d7e0ee929908c1d9da4d7f6205ebb2456 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.976879 4902 scope.go:117] "RemoveContainer" containerID="67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977108 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842"} err="failed to get container status \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": rpc error: code = NotFound desc = could not find container \"67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842\": container with ID starting with 67a88c7882ba5a1755e1a558b813a414cd5cb4a93dbf26b6d20785ef9b378842 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977176 4902 scope.go:117] "RemoveContainer" containerID="0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977369 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b"} err="failed to get container status \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": rpc error: code = NotFound desc = could not find container \"0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b\": container with ID starting with 0bd9d8b2961e0f8c79b426a109bc4d2a3810f74abedeb73b1cb1f71a4241d74b not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977391 4902 scope.go:117] "RemoveContainer" containerID="92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977586 4902 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7"} err="failed to get container status \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": rpc error: code = NotFound desc = could not find container \"92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7\": container with ID starting with 92f6d296d63b6a549bdee4330a8b6fabb5b803ccad63b69c16a1e768ca77cde7 not found: ID does not exist" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977608 4902 scope.go:117] "RemoveContainer" containerID="f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9" Dec 02 14:27:34 crc kubenswrapper[4902]: I1202 14:27:34.977791 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9"} err="failed to get container status \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": rpc error: code = NotFound desc = could not find container \"f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9\": container with ID starting with f5c3062637bad287b9b9ccb6dfb691d44c351938b11bd7f8f148a9058428aae9 not found: ID does not exist" Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.119437 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9379a49e-d66c-4224-a564-f00d4cadd9ea" path="/var/lib/kubelet/pods/9379a49e-d66c-4224-a564-f00d4cadd9ea/volumes" Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703263 4902 generic.go:334] "Generic (PLEG): container finished" podID="9fe0c43e-abdd-4536-9ecb-35d331d0b5b2" containerID="3620e7671c0e669627baa9161afd1ecca2f5d545f879bdca33d0c2bcc14fe9ea" exitCode=0 Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703345 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerDied","Data":"3620e7671c0e669627baa9161afd1ecca2f5d545f879bdca33d0c2bcc14fe9ea"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703642 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"d4b045f5912d639de76c2114101fe905ff6142bfe5ea497057bdb8d8b7ab492d"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703663 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"fca04f2976473ee0c048583e0598548759916c9bb4c5be489d9ce47d92e65184"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703679 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"cf096d5f8d6d245ec13f0c0229a5a6c42dcf617119bb1b75db324a1a4a3eab2f"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703691 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"5b9c0a445d9f67efbd79315969002ff2be3e979c08e9c1322ff1ebe0f45c709c"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703702 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"d294373797db9246e43398bcd5fd43f4301bc7ab2853af3dcb2d1c44c76a2ea3"} Dec 02 14:27:35 crc kubenswrapper[4902]: I1202 14:27:35.703717 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"a7c754bc9a8e1067d60f2c00e8e7f02cad59538654cddcbbcd12f1a22fb688dc"} Dec 02 14:27:38 crc kubenswrapper[4902]: I1202 14:27:38.729346 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"7afa5ce13764cd34e1b1eca4969ba00398e40b818e7929de13ce775c2dc8a019"} Dec 02 14:27:40 crc kubenswrapper[4902]: I1202 14:27:40.762702 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" event={"ID":"9fe0c43e-abdd-4536-9ecb-35d331d0b5b2","Type":"ContainerStarted","Data":"0ea25755490ba758648a5de60d4de11d511c8d1326873dd2d553671131e0e254"} Dec 02 14:27:40 crc kubenswrapper[4902]: I1202 14:27:40.763303 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:40 crc kubenswrapper[4902]: I1202 14:27:40.793605 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:40 crc kubenswrapper[4902]: I1202 14:27:40.800747 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" podStartSLOduration=6.800708856 podStartE2EDuration="6.800708856s" podCreationTimestamp="2025-12-02 14:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:27:40.793609952 +0000 UTC m=+691.984918681" watchObservedRunningTime="2025-12-02 14:27:40.800708856 +0000 UTC m=+691.992017585" Dec 02 14:27:41 crc kubenswrapper[4902]: I1202 14:27:41.770189 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:41 crc kubenswrapper[4902]: I1202 14:27:41.770276 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:41 crc kubenswrapper[4902]: I1202 14:27:41.848744 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:27:45 crc kubenswrapper[4902]: I1202 14:27:45.107655 4902 scope.go:117] "RemoveContainer" containerID="fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292" Dec 02 14:27:45 crc kubenswrapper[4902]: E1202 14:27:45.108020 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vm9q6_openshift-multus(381fdb33-d71e-468a-9b1e-a2920c32f8ae)\"" pod="openshift-multus/multus-vm9q6" podUID="381fdb33-d71e-468a-9b1e-a2920c32f8ae" Dec 02 14:27:56 crc kubenswrapper[4902]: I1202 14:27:56.107020 4902 scope.go:117] "RemoveContainer" containerID="fe4723b6a8f30dd9538ec0aeab70c283c879f9591ce80fad74ace550d3827292" Dec 02 14:27:57 crc kubenswrapper[4902]: I1202 14:27:57.877405 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/2.log" Dec 02 14:27:57 crc kubenswrapper[4902]: I1202 14:27:57.878459 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/1.log" Dec 02 14:27:57 crc kubenswrapper[4902]: I1202 14:27:57.878590 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm9q6" event={"ID":"381fdb33-d71e-468a-9b1e-a2920c32f8ae","Type":"ContainerStarted","Data":"6971c0054215813337df994fde8361eabbdbdd8ac4a6be6b23ed8e3a47e32bcb"} Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.127708 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m"] Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.144071 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.147162 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m"] Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.147251 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.315537 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.315659 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62vtv\" (UniqueName: \"kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.315690 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.417338 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62vtv\" (UniqueName: \"kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.417415 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.417531 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.418287 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.418475 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.456962 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62vtv\" (UniqueName: \"kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.485057 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:02 crc kubenswrapper[4902]: I1202 14:28:02.939692 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m"] Dec 02 14:28:03 crc kubenswrapper[4902]: I1202 14:28:03.923250 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerStarted","Data":"09240c2bc7c11dd3b57b3d9265a43210739a4746cb5e8bc0495f133f430b08be"} Dec 02 14:28:03 crc kubenswrapper[4902]: I1202 14:28:03.923698 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerStarted","Data":"fdeacb7b8783ccd05de0de0d6b86a0f57160f84fe635aa055ea7b9cce6ac89eb"} Dec 02 14:28:04 crc kubenswrapper[4902]: I1202 14:28:04.490710 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7ngxn" Dec 02 14:28:05 crc kubenswrapper[4902]: I1202 14:28:05.936947 4902 generic.go:334] "Generic (PLEG): container finished" podID="462c708d-094f-4814-a61f-a94b5b493956" containerID="09240c2bc7c11dd3b57b3d9265a43210739a4746cb5e8bc0495f133f430b08be" exitCode=0 Dec 02 14:28:05 crc kubenswrapper[4902]: I1202 14:28:05.937090 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerDied","Data":"09240c2bc7c11dd3b57b3d9265a43210739a4746cb5e8bc0495f133f430b08be"} Dec 02 14:28:07 crc kubenswrapper[4902]: I1202 14:28:07.952704 4902 generic.go:334] "Generic (PLEG): container finished" podID="462c708d-094f-4814-a61f-a94b5b493956" containerID="4e62202bd9e551c89fc0e522f3cbccc4be26716e5c66de0b617e570509d99ab8" exitCode=0 Dec 02 14:28:07 crc kubenswrapper[4902]: I1202 14:28:07.952824 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerDied","Data":"4e62202bd9e551c89fc0e522f3cbccc4be26716e5c66de0b617e570509d99ab8"} Dec 02 14:28:08 crc kubenswrapper[4902]: E1202 14:28:08.598784 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod462c708d_094f_4814_a61f_a94b5b493956.slice/crio-f3b1f7ec7c9541e8e6bf8528e2681566327bff681deecd1e63bffd58b330dff4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod462c708d_094f_4814_a61f_a94b5b493956.slice/crio-conmon-f3b1f7ec7c9541e8e6bf8528e2681566327bff681deecd1e63bffd58b330dff4.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:28:08 crc kubenswrapper[4902]: I1202 14:28:08.962875 4902 generic.go:334] "Generic (PLEG): container finished" podID="462c708d-094f-4814-a61f-a94b5b493956" containerID="f3b1f7ec7c9541e8e6bf8528e2681566327bff681deecd1e63bffd58b330dff4" exitCode=0 Dec 02 14:28:08 crc kubenswrapper[4902]: I1202 14:28:08.962965 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerDied","Data":"f3b1f7ec7c9541e8e6bf8528e2681566327bff681deecd1e63bffd58b330dff4"} Dec 02 14:28:09 crc kubenswrapper[4902]: I1202 14:28:09.525074 4902 scope.go:117] "RemoveContainer" containerID="2b22a8b4622242be62449d68734ec9d4df52cf0cb435e7c994124c321a57750b" Dec 02 14:28:09 crc kubenswrapper[4902]: I1202 14:28:09.971751 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm9q6_381fdb33-d71e-468a-9b1e-a2920c32f8ae/kube-multus/2.log" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.256008 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.431606 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle\") pod \"462c708d-094f-4814-a61f-a94b5b493956\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.431678 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62vtv\" (UniqueName: \"kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv\") pod \"462c708d-094f-4814-a61f-a94b5b493956\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.431798 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util\") pod \"462c708d-094f-4814-a61f-a94b5b493956\" (UID: \"462c708d-094f-4814-a61f-a94b5b493956\") " Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.437633 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle" (OuterVolumeSpecName: "bundle") pod "462c708d-094f-4814-a61f-a94b5b493956" (UID: "462c708d-094f-4814-a61f-a94b5b493956"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.441094 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv" (OuterVolumeSpecName: "kube-api-access-62vtv") pod "462c708d-094f-4814-a61f-a94b5b493956" (UID: "462c708d-094f-4814-a61f-a94b5b493956"). InnerVolumeSpecName "kube-api-access-62vtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.455681 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util" (OuterVolumeSpecName: "util") pod "462c708d-094f-4814-a61f-a94b5b493956" (UID: "462c708d-094f-4814-a61f-a94b5b493956"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.533505 4902 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-util\") on node \"crc\" DevicePath \"\"" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.533864 4902 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/462c708d-094f-4814-a61f-a94b5b493956-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.534037 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62vtv\" (UniqueName: \"kubernetes.io/projected/462c708d-094f-4814-a61f-a94b5b493956-kube-api-access-62vtv\") on node \"crc\" DevicePath \"\"" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.981786 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" event={"ID":"462c708d-094f-4814-a61f-a94b5b493956","Type":"ContainerDied","Data":"fdeacb7b8783ccd05de0de0d6b86a0f57160f84fe635aa055ea7b9cce6ac89eb"} Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.981835 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdeacb7b8783ccd05de0de0d6b86a0f57160f84fe635aa055ea7b9cce6ac89eb" Dec 02 14:28:10 crc kubenswrapper[4902]: I1202 14:28:10.982798 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.866300 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh"] Dec 02 14:28:19 crc kubenswrapper[4902]: E1202 14:28:19.866909 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="pull" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.866921 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="pull" Dec 02 14:28:19 crc kubenswrapper[4902]: E1202 14:28:19.866932 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="util" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.866938 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="util" Dec 02 14:28:19 crc kubenswrapper[4902]: E1202 14:28:19.866946 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="extract" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.866954 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="extract" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.867046 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="462c708d-094f-4814-a61f-a94b5b493956" containerName="extract" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.867399 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.876182 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.876974 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-gdqhs" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.890527 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.901079 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh"] Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.983096 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg"] Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.983798 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:19 crc kubenswrapper[4902]: W1202 14:28:19.985816 4902 reflector.go:561] object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-6md96": failed to list *v1.Secret: secrets "obo-prometheus-operator-admission-webhook-dockercfg-6md96" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 02 14:28:19 crc kubenswrapper[4902]: E1202 14:28:19.985860 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"obo-prometheus-operator-admission-webhook-dockercfg-6md96\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"obo-prometheus-operator-admission-webhook-dockercfg-6md96\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:28:19 crc kubenswrapper[4902]: W1202 14:28:19.986025 4902 reflector.go:561] object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert": failed to list *v1.Secret: secrets "obo-prometheus-operator-admission-webhook-service-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 02 14:28:19 crc kubenswrapper[4902]: E1202 14:28:19.986081 4902 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"obo-prometheus-operator-admission-webhook-service-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"obo-prometheus-operator-admission-webhook-service-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.991256 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6"] Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.991939 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:19 crc kubenswrapper[4902]: I1202 14:28:19.999347 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.014469 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.048204 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.048270 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz8kt\" (UniqueName: \"kubernetes.io/projected/cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e-kube-api-access-wz8kt\") pod \"obo-prometheus-operator-668cf9dfbb-sbwhh\" (UID: \"cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.048346 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.048411 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.048438 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.093526 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-tghq9"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.094349 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.096317 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-gqwwd" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.096456 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.110532 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-tghq9"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149231 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149312 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149334 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149373 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149413 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c7fe3eed-6ecf-406a-9552-4f2a601eb860-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-tghq9\" (UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149440 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz8kt\" (UniqueName: \"kubernetes.io/projected/cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e-kube-api-access-wz8kt\") pod \"obo-prometheus-operator-668cf9dfbb-sbwhh\" (UID: \"cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.149477 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-xn9zn\" (UniqueName: \"kubernetes.io/projected/c7fe3eed-6ecf-406a-9552-4f2a601eb860-kube-api-access-xn9zn\") pod \"observability-operator-d8bb48f5d-tghq9\" (UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.167918 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz8kt\" (UniqueName: \"kubernetes.io/projected/cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e-kube-api-access-wz8kt\") pod \"obo-prometheus-operator-668cf9dfbb-sbwhh\" (UID: \"cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.199723 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.242650 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hwvtn"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.243460 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.245791 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-q8pxx" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.250239 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f7d12c85-7736-436e-a273-03025b1fc05b-openshift-service-ca\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.250288 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c7fe3eed-6ecf-406a-9552-4f2a601eb860-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-tghq9\" (UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.250310 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rn7h\" (UniqueName: \"kubernetes.io/projected/f7d12c85-7736-436e-a273-03025b1fc05b-kube-api-access-6rn7h\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.250349 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9zn\" (UniqueName: \"kubernetes.io/projected/c7fe3eed-6ecf-406a-9552-4f2a601eb860-kube-api-access-xn9zn\") pod \"observability-operator-d8bb48f5d-tghq9\" (UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.255238 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c7fe3eed-6ecf-406a-9552-4f2a601eb860-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-tghq9\" 
(UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.260211 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hwvtn"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.274297 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9zn\" (UniqueName: \"kubernetes.io/projected/c7fe3eed-6ecf-406a-9552-4f2a601eb860-kube-api-access-xn9zn\") pod \"observability-operator-d8bb48f5d-tghq9\" (UID: \"c7fe3eed-6ecf-406a-9552-4f2a601eb860\") " pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.351417 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rn7h\" (UniqueName: \"kubernetes.io/projected/f7d12c85-7736-436e-a273-03025b1fc05b-kube-api-access-6rn7h\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.351576 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f7d12c85-7736-436e-a273-03025b1fc05b-openshift-service-ca\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.352587 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f7d12c85-7736-436e-a273-03025b1fc05b-openshift-service-ca\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.372315 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rn7h\" (UniqueName: \"kubernetes.io/projected/f7d12c85-7736-436e-a273-03025b1fc05b-kube-api-access-6rn7h\") pod \"perses-operator-5446b9c989-hwvtn\" (UID: \"f7d12c85-7736-436e-a273-03025b1fc05b\") " pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.410863 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.432532 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh"] Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.613104 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:20 crc kubenswrapper[4902]: I1202 14:28:20.656344 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-tghq9"] Dec 02 14:28:20 crc kubenswrapper[4902]: W1202 14:28:20.664034 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7fe3eed_6ecf_406a_9552_4f2a601eb860.slice/crio-4a657a918b80c7974e616d48285bbe008230a41fea0c35415424a1ca66404dc6 WatchSource:0}: Error finding container 4a657a918b80c7974e616d48285bbe008230a41fea0c35415424a1ca66404dc6: Status 404 returned error can't find the container with id 4a657a918b80c7974e616d48285bbe008230a41fea0c35415424a1ca66404dc6 Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.044829 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" event={"ID":"cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e","Type":"ContainerStarted","Data":"c9b367c6a83d741ded3e49a3b6da780386526ed84cef80832a34e6ff3a2d3806"} Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.045893 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" event={"ID":"c7fe3eed-6ecf-406a-9552-4f2a601eb860","Type":"ContainerStarted","Data":"4a657a918b80c7974e616d48285bbe008230a41fea0c35415424a1ca66404dc6"} Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.056492 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hwvtn"] Dec 02 14:28:21 crc kubenswrapper[4902]: W1202 14:28:21.058249 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7d12c85_7736_436e_a273_03025b1fc05b.slice/crio-5658d42ed4a2637c7b3caf772c4f4551d80e86bafd1cd0f92b183b1492fe97b9 WatchSource:0}: Error finding container 5658d42ed4a2637c7b3caf772c4f4551d80e86bafd1cd0f92b183b1492fe97b9: Status 404 returned error can't find the container with id 5658d42ed4a2637c7b3caf772c4f4551d80e86bafd1cd0f92b183b1492fe97b9 Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150397 4902 secret.go:188] Couldn't get secret openshift-operators/obo-prometheus-operator-admission-webhook-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150430 4902 secret.go:188] Couldn't get secret openshift-operators/obo-prometheus-operator-admission-webhook-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150401 4902 secret.go:188] Couldn't get secret openshift-operators/obo-prometheus-operator-admission-webhook-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150412 4902 secret.go:188] Couldn't get secret openshift-operators/obo-prometheus-operator-admission-webhook-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150486 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert podName:8b03406b-b481-4193-b543-a1f91deefefd nodeName:}" failed. No retries permitted until 2025-12-02 14:28:21.65046314 +0000 UTC m=+732.841771849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert") pod "obo-prometheus-operator-admission-webhook-698f669544-chdpg" (UID: "8b03406b-b481-4193-b543-a1f91deefefd") : failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150544 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert podName:b411880c-1f14-41da-bbc5-85543ddf20d7 nodeName:}" failed. No retries permitted until 2025-12-02 14:28:21.650530812 +0000 UTC m=+732.841839531 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert") pod "obo-prometheus-operator-admission-webhook-698f669544-qdwg6" (UID: "b411880c-1f14-41da-bbc5-85543ddf20d7") : failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150556 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert podName:b411880c-1f14-41da-bbc5-85543ddf20d7 nodeName:}" failed. No retries permitted until 2025-12-02 14:28:21.650549883 +0000 UTC m=+732.841858592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert") pod "obo-prometheus-operator-admission-webhook-698f669544-qdwg6" (UID: "b411880c-1f14-41da-bbc5-85543ddf20d7") : failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: E1202 14:28:21.150586 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert podName:8b03406b-b481-4193-b543-a1f91deefefd nodeName:}" failed. No retries permitted until 2025-12-02 14:28:21.650579594 +0000 UTC m=+732.841888313 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert") pod "obo-prometheus-operator-admission-webhook-698f669544-chdpg" (UID: "8b03406b-b481-4193-b543-a1f91deefefd") : failed to sync secret cache: timed out waiting for the condition Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.262748 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-6md96" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.391666 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.667733 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.668122 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.668163 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.668243 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.675071 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.675204 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.675555 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b411880c-1f14-41da-bbc5-85543ddf20d7-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-qdwg6\" (UID: \"b411880c-1f14-41da-bbc5-85543ddf20d7\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.678202 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b03406b-b481-4193-b543-a1f91deefefd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-698f669544-chdpg\" (UID: \"8b03406b-b481-4193-b543-a1f91deefefd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.804439 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" Dec 02 14:28:21 crc kubenswrapper[4902]: I1202 14:28:21.816758 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" Dec 02 14:28:22 crc kubenswrapper[4902]: I1202 14:28:22.050981 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" event={"ID":"f7d12c85-7736-436e-a273-03025b1fc05b","Type":"ContainerStarted","Data":"5658d42ed4a2637c7b3caf772c4f4551d80e86bafd1cd0f92b183b1492fe97b9"} Dec 02 14:28:22 crc kubenswrapper[4902]: I1202 14:28:22.323178 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6"] Dec 02 14:28:22 crc kubenswrapper[4902]: I1202 14:28:22.628536 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg"] Dec 02 14:28:22 crc kubenswrapper[4902]: W1202 14:28:22.657736 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b03406b_b481_4193_b543_a1f91deefefd.slice/crio-c6b6bde8502c66e84c7b8b5901133c1e8986272c5920ae8d3df9047726643886 WatchSource:0}: Error finding container c6b6bde8502c66e84c7b8b5901133c1e8986272c5920ae8d3df9047726643886: Status 404 returned error can't find the container with id c6b6bde8502c66e84c7b8b5901133c1e8986272c5920ae8d3df9047726643886 Dec 02 14:28:23 crc kubenswrapper[4902]: I1202 14:28:23.058844 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" event={"ID":"b411880c-1f14-41da-bbc5-85543ddf20d7","Type":"ContainerStarted","Data":"54d3f50cdd3c9f797680dec94c60bdf37626a993dbacbe718c77866c0f199b0e"} Dec 02 14:28:23 crc kubenswrapper[4902]: I1202 14:28:23.059957 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" event={"ID":"8b03406b-b481-4193-b543-a1f91deefefd","Type":"ContainerStarted","Data":"c6b6bde8502c66e84c7b8b5901133c1e8986272c5920ae8d3df9047726643886"} Dec 02 14:28:36 crc kubenswrapper[4902]: E1202 14:28:36.466623 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 02 14:28:36 crc kubenswrapper[4902]: E1202 14:28:36.467314 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wz8kt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-sbwhh_openshift-operators(cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:28:36 crc kubenswrapper[4902]: E1202 14:28:36.469319 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" podUID="cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e" Dec 02 14:28:37 crc kubenswrapper[4902]: E1202 14:28:37.091905 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 02 14:28:37 crc 
kubenswrapper[4902]: E1202 14:28:37.092086 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6rn7h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-hwvtn_openshift-operators(f7d12c85-7736-436e-a273-03025b1fc05b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:28:37 crc kubenswrapper[4902]: E1202 14:28:37.093292 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" podUID="f7d12c85-7736-436e-a273-03025b1fc05b" Dec 02 14:28:37 crc kubenswrapper[4902]: E1202 14:28:37.169868 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" podUID="f7d12c85-7736-436e-a273-03025b1fc05b" Dec 02 14:28:37 crc kubenswrapper[4902]: E1202 14:28:37.170233 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" podUID="cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e" Dec 02 14:28:39 crc kubenswrapper[4902]: E1202 14:28:39.546245 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 02 14:28:39 crc kubenswrapper[4902]: E1202 14:28:39.546749 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91
596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xn9zn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-tghq9_openshift-operators(c7fe3eed-6ecf-406a-9552-4f2a601eb860): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 14:28:39 crc kubenswrapper[4902]: E1202 14:28:39.548093 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" podUID="c7fe3eed-6ecf-406a-9552-4f2a601eb860" Dec 02 14:28:40 crc kubenswrapper[4902]: I1202 14:28:40.182161 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" event={"ID":"8b03406b-b481-4193-b543-a1f91deefefd","Type":"ContainerStarted","Data":"67295fe10ece931b61cac0e3792c99f6c0efc22f77a8f5167139098a6d57ef60"} Dec 02 14:28:40 crc kubenswrapper[4902]: I1202 14:28:40.185181 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" event={"ID":"b411880c-1f14-41da-bbc5-85543ddf20d7","Type":"ContainerStarted","Data":"93bb797200ddd0df7ff68c84ff872ebfb6f39915ad3e7293c6277f9f2cf4b3cd"} Dec 02 14:28:40 crc kubenswrapper[4902]: E1202 14:28:40.185314 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off 
pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" podUID="c7fe3eed-6ecf-406a-9552-4f2a601eb860" Dec 02 14:28:40 crc kubenswrapper[4902]: I1202 14:28:40.201749 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-chdpg" podStartSLOduration=4.337151135 podStartE2EDuration="21.201733146s" podCreationTimestamp="2025-12-02 14:28:19 +0000 UTC" firstStartedPulling="2025-12-02 14:28:22.680308585 +0000 UTC m=+733.871617294" lastFinishedPulling="2025-12-02 14:28:39.544890596 +0000 UTC m=+750.736199305" observedRunningTime="2025-12-02 14:28:40.199726728 +0000 UTC m=+751.391035437" watchObservedRunningTime="2025-12-02 14:28:40.201733146 +0000 UTC m=+751.393041855" Dec 02 14:28:48 crc kubenswrapper[4902]: I1202 14:28:48.132240 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-698f669544-qdwg6" podStartSLOduration=11.951026252 podStartE2EDuration="29.132218085s" podCreationTimestamp="2025-12-02 14:28:19 +0000 UTC" firstStartedPulling="2025-12-02 14:28:22.345709071 +0000 UTC m=+733.537017780" lastFinishedPulling="2025-12-02 14:28:39.526900894 +0000 UTC m=+750.718209613" observedRunningTime="2025-12-02 14:28:40.268882466 +0000 UTC m=+751.460191185" watchObservedRunningTime="2025-12-02 14:28:48.132218085 +0000 UTC m=+759.323526804" Dec 02 14:28:49 crc kubenswrapper[4902]: I1202 14:28:49.227528 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" event={"ID":"f7d12c85-7736-436e-a273-03025b1fc05b","Type":"ContainerStarted","Data":"5272d12e0d576019a1bfa546416af2cec082ea829528a98f2d2ff41729516136"} Dec 02 14:28:49 crc kubenswrapper[4902]: I1202 14:28:49.228130 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:28:49 crc kubenswrapper[4902]: I1202 14:28:49.242261 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" podStartSLOduration=1.506464635 podStartE2EDuration="29.24224012s" podCreationTimestamp="2025-12-02 14:28:20 +0000 UTC" firstStartedPulling="2025-12-02 14:28:21.063632819 +0000 UTC m=+732.254941528" lastFinishedPulling="2025-12-02 14:28:48.799408304 +0000 UTC m=+759.990717013" observedRunningTime="2025-12-02 14:28:49.24086304 +0000 UTC m=+760.432171779" watchObservedRunningTime="2025-12-02 14:28:49.24224012 +0000 UTC m=+760.433548829" Dec 02 14:28:50 crc kubenswrapper[4902]: I1202 14:28:50.233526 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" event={"ID":"cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e","Type":"ContainerStarted","Data":"79fe8c9be05c5770585a0f1e2aeb03f0b77db8d98688b9ff6dfc10969e5083ab"} Dec 02 14:28:50 crc kubenswrapper[4902]: I1202 14:28:50.252906 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sbwhh" podStartSLOduration=1.888402621 podStartE2EDuration="31.252888511s" podCreationTimestamp="2025-12-02 14:28:19 +0000 UTC" firstStartedPulling="2025-12-02 14:28:20.456796111 +0000 UTC m=+731.648104820" lastFinishedPulling="2025-12-02 
14:28:49.821281991 +0000 UTC m=+761.012590710" observedRunningTime="2025-12-02 14:28:50.252298834 +0000 UTC m=+761.443607553" watchObservedRunningTime="2025-12-02 14:28:50.252888511 +0000 UTC m=+761.444197220" Dec 02 14:28:58 crc kubenswrapper[4902]: I1202 14:28:58.075970 4902 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 14:28:58 crc kubenswrapper[4902]: I1202 14:28:58.280035 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" event={"ID":"c7fe3eed-6ecf-406a-9552-4f2a601eb860","Type":"ContainerStarted","Data":"f9fd3f0e6e2e78a71f42360a18bef56ec9ef1b201c4905fa24973587580b65b1"} Dec 02 14:28:58 crc kubenswrapper[4902]: I1202 14:28:58.280541 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:28:58 crc kubenswrapper[4902]: I1202 14:28:58.306147 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" podStartSLOduration=1.7688144019999998 podStartE2EDuration="38.306131873s" podCreationTimestamp="2025-12-02 14:28:20 +0000 UTC" firstStartedPulling="2025-12-02 14:28:20.667829398 +0000 UTC m=+731.859138107" lastFinishedPulling="2025-12-02 14:28:57.205146859 +0000 UTC m=+768.396455578" observedRunningTime="2025-12-02 14:28:58.304101034 +0000 UTC m=+769.495409753" watchObservedRunningTime="2025-12-02 14:28:58.306131873 +0000 UTC m=+769.497440592" Dec 02 14:28:58 crc kubenswrapper[4902]: I1202 14:28:58.310679 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" Dec 02 14:29:00 crc kubenswrapper[4902]: I1202 14:29:00.617745 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-hwvtn" Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.902455 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl"] Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.904801 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.906986 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.913763 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl"] Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.980950 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.981196 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrvv4\" (UniqueName: \"kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:17 crc kubenswrapper[4902]: I1202 14:29:17.981316 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.082366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.082679 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrvv4\" (UniqueName: \"kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.082825 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.082828 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.083212 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.100633 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrvv4\" (UniqueName: \"kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.220677 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:18 crc kubenswrapper[4902]: I1202 14:29:18.581118 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl"] Dec 02 14:29:18 crc kubenswrapper[4902]: W1202 14:29:18.586947 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9abb66fe_7d84_40db_981f_b19ac735c12a.slice/crio-6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516 WatchSource:0}: Error finding container 6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516: Status 404 returned error can't find the container with id 6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516 Dec 02 14:29:19 crc kubenswrapper[4902]: I1202 14:29:19.489289 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerStarted","Data":"6b7174c99cc4a05e20130d205aefa83e69e845f9ed9c83417a47fd754cff6dd3"} Dec 02 14:29:19 crc kubenswrapper[4902]: I1202 14:29:19.490456 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerStarted","Data":"6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516"} Dec 02 14:29:19 crc kubenswrapper[4902]: E1202 14:29:19.604429 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9abb66fe_7d84_40db_981f_b19ac735c12a.slice/crio-6b7174c99cc4a05e20130d205aefa83e69e845f9ed9c83417a47fd754cff6dd3.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:29:20 crc kubenswrapper[4902]: I1202 14:29:20.498084 4902 generic.go:334] "Generic (PLEG): container finished" podID="9abb66fe-7d84-40db-981f-b19ac735c12a" 
containerID="6b7174c99cc4a05e20130d205aefa83e69e845f9ed9c83417a47fd754cff6dd3" exitCode=0 Dec 02 14:29:20 crc kubenswrapper[4902]: I1202 14:29:20.498134 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerDied","Data":"6b7174c99cc4a05e20130d205aefa83e69e845f9ed9c83417a47fd754cff6dd3"} Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.471643 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"] Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.473621 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.488657 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"] Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.531787 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zm88\" (UniqueName: \"kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.531850 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.531884 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.633675 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zm88\" (UniqueName: \"kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.633727 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.633757 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.634280 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.634498 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.655399 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zm88\" (UniqueName: \"kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88\") pod \"redhat-operators-2vttl\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") " pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:21 crc kubenswrapper[4902]: I1202 14:29:21.788262 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:22 crc kubenswrapper[4902]: I1202 14:29:22.072075 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"] Dec 02 14:29:22 crc kubenswrapper[4902]: W1202 14:29:22.078024 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecd31dd7_18ae_4b49_8bef_8c997dd76ccb.slice/crio-e3fe78f580f1b1e4b8196786f98988b603ea2e558de1997bee047a4a1fdd4fcc WatchSource:0}: Error finding container e3fe78f580f1b1e4b8196786f98988b603ea2e558de1997bee047a4a1fdd4fcc: Status 404 returned error can't find the container with id e3fe78f580f1b1e4b8196786f98988b603ea2e558de1997bee047a4a1fdd4fcc Dec 02 14:29:22 crc kubenswrapper[4902]: I1202 14:29:22.510595 4902 generic.go:334] "Generic (PLEG): container finished" podID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerID="0556468dd57cd6f88db5b9925734540525ed61e59c2baf2804a3a1238e411242" exitCode=0 Dec 02 14:29:22 crc kubenswrapper[4902]: I1202 14:29:22.510631 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerDied","Data":"0556468dd57cd6f88db5b9925734540525ed61e59c2baf2804a3a1238e411242"} Dec 02 14:29:22 crc kubenswrapper[4902]: I1202 14:29:22.510653 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerStarted","Data":"e3fe78f580f1b1e4b8196786f98988b603ea2e558de1997bee047a4a1fdd4fcc"} Dec 02 14:29:23 crc kubenswrapper[4902]: I1202 14:29:23.520821 4902 generic.go:334] "Generic (PLEG): container finished" podID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerID="babcea0fbbe6c909688a1e2ea1910ba715b8d165d130666324d5f2fb33dadb19" exitCode=0 Dec 02 14:29:23 crc kubenswrapper[4902]: I1202 14:29:23.521023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerDied","Data":"babcea0fbbe6c909688a1e2ea1910ba715b8d165d130666324d5f2fb33dadb19"} Dec 02 14:29:24 crc kubenswrapper[4902]: I1202 14:29:24.551974 4902 generic.go:334] "Generic (PLEG): container finished" 
podID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerID="9ddffee59b58834a5aa04dc3bb33c80109d01f2a12430e7747e37de32821bc23" exitCode=0 Dec 02 14:29:24 crc kubenswrapper[4902]: I1202 14:29:24.552070 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerDied","Data":"9ddffee59b58834a5aa04dc3bb33c80109d01f2a12430e7747e37de32821bc23"} Dec 02 14:29:24 crc kubenswrapper[4902]: I1202 14:29:24.554105 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerStarted","Data":"36ef1633d2240fee10f05ae21fb892d932b179ec0546d8139b9f61079ecb26b2"} Dec 02 14:29:25 crc kubenswrapper[4902]: I1202 14:29:25.889082 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:25 crc kubenswrapper[4902]: I1202 14:29:25.923529 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrvv4\" (UniqueName: \"kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4\") pod \"9abb66fe-7d84-40db-981f-b19ac735c12a\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " Dec 02 14:29:25 crc kubenswrapper[4902]: I1202 14:29:25.923673 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle\") pod \"9abb66fe-7d84-40db-981f-b19ac735c12a\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " Dec 02 14:29:25 crc kubenswrapper[4902]: I1202 14:29:25.924272 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle" (OuterVolumeSpecName: "bundle") pod "9abb66fe-7d84-40db-981f-b19ac735c12a" (UID: "9abb66fe-7d84-40db-981f-b19ac735c12a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:29:25 crc kubenswrapper[4902]: I1202 14:29:25.929213 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4" (OuterVolumeSpecName: "kube-api-access-nrvv4") pod "9abb66fe-7d84-40db-981f-b19ac735c12a" (UID: "9abb66fe-7d84-40db-981f-b19ac735c12a"). InnerVolumeSpecName "kube-api-access-nrvv4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.024302 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util\") pod \"9abb66fe-7d84-40db-981f-b19ac735c12a\" (UID: \"9abb66fe-7d84-40db-981f-b19ac735c12a\") " Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.024621 4902 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.024633 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrvv4\" (UniqueName: \"kubernetes.io/projected/9abb66fe-7d84-40db-981f-b19ac735c12a-kube-api-access-nrvv4\") on node \"crc\" DevicePath \"\"" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.035891 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util" (OuterVolumeSpecName: "util") pod "9abb66fe-7d84-40db-981f-b19ac735c12a" (UID: "9abb66fe-7d84-40db-981f-b19ac735c12a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.125440 4902 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9abb66fe-7d84-40db-981f-b19ac735c12a-util\") on node \"crc\" DevicePath \"\"" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.570521 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" event={"ID":"9abb66fe-7d84-40db-981f-b19ac735c12a","Type":"ContainerDied","Data":"6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516"} Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.571024 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6909c699ba6b6d1c392d520acf8d4bf6e8c723b30b79d37952fff842c95cc516" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.571185 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl" Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.573602 4902 generic.go:334] "Generic (PLEG): container finished" podID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerID="36ef1633d2240fee10f05ae21fb892d932b179ec0546d8139b9f61079ecb26b2" exitCode=0 Dec 02 14:29:26 crc kubenswrapper[4902]: I1202 14:29:26.573644 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerDied","Data":"36ef1633d2240fee10f05ae21fb892d932b179ec0546d8139b9f61079ecb26b2"} Dec 02 14:29:27 crc kubenswrapper[4902]: I1202 14:29:27.580804 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerStarted","Data":"333056d0837ae3c7a6a7561feb9d50ec447576646b931ea0c8f3da9981f4d361"} Dec 02 14:29:27 crc kubenswrapper[4902]: I1202 14:29:27.611156 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2vttl" podStartSLOduration=1.960527056 podStartE2EDuration="6.611134306s" podCreationTimestamp="2025-12-02 14:29:21 +0000 UTC" firstStartedPulling="2025-12-02 14:29:22.511878418 +0000 UTC m=+793.703187117" lastFinishedPulling="2025-12-02 14:29:27.162485648 +0000 UTC m=+798.353794367" observedRunningTime="2025-12-02 14:29:27.595981875 +0000 UTC m=+798.787290584" watchObservedRunningTime="2025-12-02 14:29:27.611134306 +0000 UTC m=+798.802443015" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.886403 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5"] Dec 02 14:29:28 crc kubenswrapper[4902]: E1202 14:29:28.886623 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="util" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.886636 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="util" Dec 02 14:29:28 crc kubenswrapper[4902]: E1202 14:29:28.886652 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="pull" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.886658 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="pull" Dec 02 14:29:28 crc kubenswrapper[4902]: E1202 14:29:28.886671 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="extract" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.886677 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="extract" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.886778 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9abb66fe-7d84-40db-981f-b19ac735c12a" containerName="extract" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.887149 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.888698 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-4stlx" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.889140 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.890550 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 02 14:29:28 crc kubenswrapper[4902]: I1202 14:29:28.902765 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5"] Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.062370 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq4q9\" (UniqueName: \"kubernetes.io/projected/8d059e00-4b87-410a-8613-a52e1263dc9f-kube-api-access-hq4q9\") pod \"nmstate-operator-5b5b58f5c8-hpnm5\" (UID: \"8d059e00-4b87-410a-8613-a52e1263dc9f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.164123 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq4q9\" (UniqueName: \"kubernetes.io/projected/8d059e00-4b87-410a-8613-a52e1263dc9f-kube-api-access-hq4q9\") pod \"nmstate-operator-5b5b58f5c8-hpnm5\" (UID: \"8d059e00-4b87-410a-8613-a52e1263dc9f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.185402 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq4q9\" (UniqueName: \"kubernetes.io/projected/8d059e00-4b87-410a-8613-a52e1263dc9f-kube-api-access-hq4q9\") pod \"nmstate-operator-5b5b58f5c8-hpnm5\" (UID: \"8d059e00-4b87-410a-8613-a52e1263dc9f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.206787 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.443681 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5"] Dec 02 14:29:29 crc kubenswrapper[4902]: W1202 14:29:29.447070 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d059e00_4b87_410a_8613_a52e1263dc9f.slice/crio-ce2eddf0a193f83a47e85e9ea0e323b6ee40e326567865bffa131fcc517a3467 WatchSource:0}: Error finding container ce2eddf0a193f83a47e85e9ea0e323b6ee40e326567865bffa131fcc517a3467: Status 404 returned error can't find the container with id ce2eddf0a193f83a47e85e9ea0e323b6ee40e326567865bffa131fcc517a3467 Dec 02 14:29:29 crc kubenswrapper[4902]: I1202 14:29:29.593177 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" event={"ID":"8d059e00-4b87-410a-8613-a52e1263dc9f","Type":"ContainerStarted","Data":"ce2eddf0a193f83a47e85e9ea0e323b6ee40e326567865bffa131fcc517a3467"} Dec 02 14:29:31 crc kubenswrapper[4902]: I1202 14:29:31.788632 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:31 crc kubenswrapper[4902]: I1202 14:29:31.790626 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:32 crc kubenswrapper[4902]: I1202 14:29:32.832648 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2vttl" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="registry-server" probeResult="failure" output=< Dec 02 14:29:32 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 14:29:32 crc kubenswrapper[4902]: > Dec 02 14:29:34 crc kubenswrapper[4902]: I1202 14:29:34.733006 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:29:34 crc kubenswrapper[4902]: I1202 14:29:34.733097 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:29:40 crc kubenswrapper[4902]: I1202 14:29:40.660317 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" event={"ID":"8d059e00-4b87-410a-8613-a52e1263dc9f","Type":"ContainerStarted","Data":"c19a956ff543c79151ae70e7d4804ec20b2744dccbc3d0f9595b6267175834b6"} Dec 02 14:29:40 crc kubenswrapper[4902]: I1202 14:29:40.679253 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-hpnm5" podStartSLOduration=2.6915025200000002 podStartE2EDuration="12.679234308s" podCreationTimestamp="2025-12-02 14:29:28 +0000 UTC" firstStartedPulling="2025-12-02 14:29:29.450632192 +0000 UTC m=+800.641940891" lastFinishedPulling="2025-12-02 14:29:39.43836397 +0000 UTC m=+810.629672679" observedRunningTime="2025-12-02 14:29:40.676273974 +0000 UTC m=+811.867582693" 
watchObservedRunningTime="2025-12-02 14:29:40.679234308 +0000 UTC m=+811.870543027" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.594350 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.595485 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.598265 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-9f89j" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.605866 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.610794 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.611658 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.614834 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.623747 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-c2qjr"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.624709 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.636087 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.748364 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.749943 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.751792 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.751968 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.752214 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-vv9d9" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759642 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759685 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-nmstate-lock\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759708 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-ovs-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759723 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvw4h\" (UniqueName: \"kubernetes.io/projected/b31a81b9-0e67-4858-ae54-304c17fd0495-kube-api-access-gvw4h\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759776 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-dbus-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759793 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlrdg\" (UniqueName: \"kubernetes.io/projected/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-kube-api-access-zlrdg\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.759814 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnpc2\" (UniqueName: \"kubernetes.io/projected/ad4e56ef-44e2-46c9-a609-808e4c96fa2e-kube-api-access-vnpc2\") pod \"nmstate-metrics-7f946cbc9-zvsm4\" (UID: \"ad4e56ef-44e2-46c9-a609-808e4c96fa2e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 
14:29:41.765832 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.843805 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861326 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-nmstate-lock\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861602 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-ovs-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861684 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvw4h\" (UniqueName: \"kubernetes.io/projected/b31a81b9-0e67-4858-ae54-304c17fd0495-kube-api-access-gvw4h\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861754 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6382b3e8-8f0e-421b-9612-0a398fd0f994-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861827 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6382b3e8-8f0e-421b-9612-0a398fd0f994-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861899 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-dbus-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.861966 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlrdg\" (UniqueName: \"kubernetes.io/projected/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-kube-api-access-zlrdg\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862041 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnpc2\" (UniqueName: \"kubernetes.io/projected/ad4e56ef-44e2-46c9-a609-808e4c96fa2e-kube-api-access-vnpc2\") pod \"nmstate-metrics-7f946cbc9-zvsm4\" (UID: \"ad4e56ef-44e2-46c9-a609-808e4c96fa2e\") " 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862125 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cdfb\" (UniqueName: \"kubernetes.io/projected/6382b3e8-8f0e-421b-9612-0a398fd0f994-kube-api-access-5cdfb\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862189 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: E1202 14:29:41.862335 4902 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862386 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-nmstate-lock\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862436 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-ovs-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: E1202 14:29:41.862513 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair podName:b31a81b9-0e67-4858-ae54-304c17fd0495 nodeName:}" failed. No retries permitted until 2025-12-02 14:29:42.362492589 +0000 UTC m=+813.553801298 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-8g56t" (UID: "b31a81b9-0e67-4858-ae54-304c17fd0495") : secret "openshift-nmstate-webhook" not found Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.862943 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-dbus-socket\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.896923 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnpc2\" (UniqueName: \"kubernetes.io/projected/ad4e56ef-44e2-46c9-a609-808e4c96fa2e-kube-api-access-vnpc2\") pod \"nmstate-metrics-7f946cbc9-zvsm4\" (UID: \"ad4e56ef-44e2-46c9-a609-808e4c96fa2e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.904173 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2vttl" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.905347 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvw4h\" (UniqueName: \"kubernetes.io/projected/b31a81b9-0e67-4858-ae54-304c17fd0495-kube-api-access-gvw4h\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.908124 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlrdg\" (UniqueName: \"kubernetes.io/projected/4d54d7c2-5d4a-41b5-9949-96b8ab11df5e-kube-api-access-zlrdg\") pod \"nmstate-handler-c2qjr\" (UID: \"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e\") " pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.912799 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.959836 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-c2qjr" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.964160 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cdfb\" (UniqueName: \"kubernetes.io/projected/6382b3e8-8f0e-421b-9612-0a398fd0f994-kube-api-access-5cdfb\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.964251 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6382b3e8-8f0e-421b-9612-0a398fd0f994-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.964279 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6382b3e8-8f0e-421b-9612-0a398fd0f994-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.965176 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6382b3e8-8f0e-421b-9612-0a398fd0f994-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.966259 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7468d54cd-p8d4m"] Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.967760 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6382b3e8-8f0e-421b-9612-0a398fd0f994-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.981824 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:41 crc kubenswrapper[4902]: I1202 14:29:41.989876 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7468d54cd-p8d4m"] Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.016269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cdfb\" (UniqueName: \"kubernetes.io/projected/6382b3e8-8f0e-421b-9612-0a398fd0f994-kube-api-access-5cdfb\") pod \"nmstate-console-plugin-7fbb5f6569-7nbg8\" (UID: \"6382b3e8-8f0e-421b-9612-0a398fd0f994\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065193 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wnl\" (UniqueName: \"kubernetes.io/projected/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-kube-api-access-98wnl\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065601 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-oauth-serving-cert\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065633 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-serving-cert\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065657 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-service-ca\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065705 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065524 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065728 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-trusted-ca-bundle\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.065807 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-oauth-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167477 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-serving-cert\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167532 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-service-ca\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167596 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167626 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-trusted-ca-bundle\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167668 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-oauth-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167699 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wnl\" (UniqueName: \"kubernetes.io/projected/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-kube-api-access-98wnl\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.167741 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-oauth-serving-cert\") pod 
\"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.168602 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-service-ca\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.168619 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-oauth-serving-cert\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.169019 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-trusted-ca-bundle\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.169583 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.176919 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-serving-cert\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.185231 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-console-oauth-config\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.211063 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wnl\" (UniqueName: \"kubernetes.io/projected/8f7c41c0-a51b-4cf1-9261-58a1d3c9b170-kube-api-access-98wnl\") pod \"console-7468d54cd-p8d4m\" (UID: \"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170\") " pod="openshift-console/console-7468d54cd-p8d4m" Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.345149 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7468d54cd-p8d4m"
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.372809 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.376686 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b31a81b9-0e67-4858-ae54-304c17fd0495-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-8g56t\" (UID: \"b31a81b9-0e67-4858-ae54-304c17fd0495\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.492060 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4"]
Dec 02 14:29:42 crc kubenswrapper[4902]: W1202 14:29:42.498496 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad4e56ef_44e2_46c9_a609_808e4c96fa2e.slice/crio-f34a488c8269a86712d1a0b92b832b2a84dc00852524a202043be7e8fcbc50fe WatchSource:0}: Error finding container f34a488c8269a86712d1a0b92b832b2a84dc00852524a202043be7e8fcbc50fe: Status 404 returned error can't find the container with id f34a488c8269a86712d1a0b92b832b2a84dc00852524a202043be7e8fcbc50fe
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.510019 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8"]
Dec 02 14:29:42 crc kubenswrapper[4902]: W1202 14:29:42.513183 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6382b3e8_8f0e_421b_9612_0a398fd0f994.slice/crio-19ba6f9a7863edea65307367defdec95704da4ec52109b362bff5e1dbbb867ff WatchSource:0}: Error finding container 19ba6f9a7863edea65307367defdec95704da4ec52109b362bff5e1dbbb867ff: Status 404 returned error can't find the container with id 19ba6f9a7863edea65307367defdec95704da4ec52109b362bff5e1dbbb867ff
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.531604 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.647787 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7468d54cd-p8d4m"]
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.671188 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" event={"ID":"ad4e56ef-44e2-46c9-a609-808e4c96fa2e","Type":"ContainerStarted","Data":"f34a488c8269a86712d1a0b92b832b2a84dc00852524a202043be7e8fcbc50fe"}
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.672276 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7468d54cd-p8d4m" event={"ID":"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170","Type":"ContainerStarted","Data":"005bf877a40c29fdfd72b3ca059fb68b10da5d3c3ee73c5ed465dca4a5dccb7c"}
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.673359 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" event={"ID":"6382b3e8-8f0e-421b-9612-0a398fd0f994","Type":"ContainerStarted","Data":"19ba6f9a7863edea65307367defdec95704da4ec52109b362bff5e1dbbb867ff"}
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.674205 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-c2qjr" event={"ID":"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e","Type":"ContainerStarted","Data":"5fde93acad2b49f910bf89adf800249c51077d207ed8d281674a95f49fad29bb"}
Dec 02 14:29:42 crc kubenswrapper[4902]: I1202 14:29:42.941499 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"]
Dec 02 14:29:42 crc kubenswrapper[4902]: W1202 14:29:42.942912 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb31a81b9_0e67_4858_ae54_304c17fd0495.slice/crio-08f3b76c25604e38be122fbd987488139ca4256e5bc5ff327c49214747d395dc WatchSource:0}: Error finding container 08f3b76c25604e38be122fbd987488139ca4256e5bc5ff327c49214747d395dc: Status 404 returned error can't find the container with id 08f3b76c25604e38be122fbd987488139ca4256e5bc5ff327c49214747d395dc
Dec 02 14:29:43 crc kubenswrapper[4902]: I1202 14:29:43.680524 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" event={"ID":"b31a81b9-0e67-4858-ae54-304c17fd0495","Type":"ContainerStarted","Data":"08f3b76c25604e38be122fbd987488139ca4256e5bc5ff327c49214747d395dc"}
Dec 02 14:29:43 crc kubenswrapper[4902]: I1202 14:29:43.681787 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7468d54cd-p8d4m" event={"ID":"8f7c41c0-a51b-4cf1-9261-58a1d3c9b170","Type":"ContainerStarted","Data":"29352d45c92c7a0d7956f03864ebe429d831cf2b7c8358fc9d66604792f5c8d8"}
Dec 02 14:29:43 crc kubenswrapper[4902]: I1202 14:29:43.708350 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7468d54cd-p8d4m" podStartSLOduration=2.708322035 podStartE2EDuration="2.708322035s" podCreationTimestamp="2025-12-02 14:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:29:43.70534369 +0000 UTC m=+814.896652409" watchObservedRunningTime="2025-12-02 14:29:43.708322035 +0000 UTC m=+814.899630744"
Dec 02 14:29:44 crc kubenswrapper[4902]: I1202 14:29:44.462285 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"]
Dec 02 14:29:44 crc kubenswrapper[4902]: I1202 14:29:44.462507 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2vttl" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="registry-server" containerID="cri-o://333056d0837ae3c7a6a7561feb9d50ec447576646b931ea0c8f3da9981f4d361" gracePeriod=2
Dec 02 14:29:44 crc kubenswrapper[4902]: I1202 14:29:44.689758 4902 generic.go:334] "Generic (PLEG): container finished" podID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerID="333056d0837ae3c7a6a7561feb9d50ec447576646b931ea0c8f3da9981f4d361" exitCode=0
Dec 02 14:29:44 crc kubenswrapper[4902]: I1202 14:29:44.689842 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerDied","Data":"333056d0837ae3c7a6a7561feb9d50ec447576646b931ea0c8f3da9981f4d361"}
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.195921 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vttl"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.314090 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zm88\" (UniqueName: \"kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88\") pod \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") "
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.314150 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities\") pod \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") "
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.314230 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content\") pod \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\" (UID: \"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb\") "
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.315436 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities" (OuterVolumeSpecName: "utilities") pod "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" (UID: "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.319686 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88" (OuterVolumeSpecName: "kube-api-access-4zm88") pod "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" (UID: "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb"). InnerVolumeSpecName "kube-api-access-4zm88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.415422 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zm88\" (UniqueName: \"kubernetes.io/projected/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-kube-api-access-4zm88\") on node \"crc\" DevicePath \"\""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.415874 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.428197 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" (UID: "ecd31dd7-18ae-4b49-8bef-8c997dd76ccb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.517170 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.699387 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vttl" event={"ID":"ecd31dd7-18ae-4b49-8bef-8c997dd76ccb","Type":"ContainerDied","Data":"e3fe78f580f1b1e4b8196786f98988b603ea2e558de1997bee047a4a1fdd4fcc"}
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.699727 4902 scope.go:117] "RemoveContainer" containerID="333056d0837ae3c7a6a7561feb9d50ec447576646b931ea0c8f3da9981f4d361"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.699443 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vttl"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.702358 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" event={"ID":"b31a81b9-0e67-4858-ae54-304c17fd0495","Type":"ContainerStarted","Data":"141aba19886991505d6a16c515e3ff1800a18a762fa2512c3eb2b1a7c4cc70f9"}
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.702415 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.704609 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-c2qjr" event={"ID":"4d54d7c2-5d4a-41b5-9949-96b8ab11df5e","Type":"ContainerStarted","Data":"452d8866b0dbb228dc8ca7ee5afc5975ecdf00a1e1378a9ce5f1c71b8056d4e8"}
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.704728 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-c2qjr"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.706342 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" event={"ID":"ad4e56ef-44e2-46c9-a609-808e4c96fa2e","Type":"ContainerStarted","Data":"94f122d811b456641d35496e00ba281c836776abda39ba6d1449b32ced669f9d"}
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.719669 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t" podStartSLOduration=2.676858511 podStartE2EDuration="4.719648555s" podCreationTimestamp="2025-12-02 14:29:41 +0000 UTC" firstStartedPulling="2025-12-02 14:29:42.945041027 +0000 UTC m=+814.136349736" lastFinishedPulling="2025-12-02 14:29:44.987831071 +0000 UTC m=+816.179139780" observedRunningTime="2025-12-02 14:29:45.717823493 +0000 UTC m=+816.909132202" watchObservedRunningTime="2025-12-02 14:29:45.719648555 +0000 UTC m=+816.910957264"
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.734793 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"]
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.738952 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2vttl"]
Dec 02 14:29:45 crc kubenswrapper[4902]: I1202 14:29:45.753332 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-c2qjr" podStartSLOduration=1.81386738 podStartE2EDuration="4.753315511s" podCreationTimestamp="2025-12-02 14:29:41 +0000 UTC" firstStartedPulling="2025-12-02 14:29:42.048267537 +0000 UTC m=+813.239576246" lastFinishedPulling="2025-12-02 14:29:44.987715668 +0000 UTC m=+816.179024377" observedRunningTime="2025-12-02 14:29:45.746029234 +0000 UTC m=+816.937337963" watchObservedRunningTime="2025-12-02 14:29:45.753315511 +0000 UTC m=+816.944624220"
Dec 02 14:29:46 crc kubenswrapper[4902]: I1202 14:29:46.232909 4902 scope.go:117] "RemoveContainer" containerID="36ef1633d2240fee10f05ae21fb892d932b179ec0546d8139b9f61079ecb26b2"
Dec 02 14:29:46 crc kubenswrapper[4902]: I1202 14:29:46.281072 4902 scope.go:117] "RemoveContainer" containerID="0556468dd57cd6f88db5b9925734540525ed61e59c2baf2804a3a1238e411242"
Dec 02 14:29:46 crc kubenswrapper[4902]: I1202 14:29:46.712737 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" event={"ID":"6382b3e8-8f0e-421b-9612-0a398fd0f994","Type":"ContainerStarted","Data":"c37415b4d433409198ffd4b72f3b8b455882f8e5372bb330f12cb65a65657054"}
Dec 02 14:29:46 crc kubenswrapper[4902]: I1202 14:29:46.734734 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7nbg8" podStartSLOduration=1.966259461 podStartE2EDuration="5.734715066s" podCreationTimestamp="2025-12-02 14:29:41 +0000 UTC" firstStartedPulling="2025-12-02 14:29:42.515059291 +0000 UTC m=+813.706367990" lastFinishedPulling="2025-12-02 14:29:46.283514886 +0000 UTC m=+817.474823595" observedRunningTime="2025-12-02 14:29:46.730540517 +0000 UTC m=+817.921849226" watchObservedRunningTime="2025-12-02 14:29:46.734715066 +0000 UTC m=+817.926023775"
Dec 02 14:29:47 crc kubenswrapper[4902]: I1202 14:29:47.116664 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" path="/var/lib/kubelet/pods/ecd31dd7-18ae-4b49-8bef-8c997dd76ccb/volumes"
Dec 02 14:29:47 crc kubenswrapper[4902]: I1202 14:29:47.721702 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" event={"ID":"ad4e56ef-44e2-46c9-a609-808e4c96fa2e","Type":"ContainerStarted","Data":"b4cc8ed857e27f930ac1f3af4927ee6a71c3fc9303d12c730a5b90a17836a8e4"}
Dec 02 14:29:47 crc kubenswrapper[4902]: I1202 14:29:47.735454 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zvsm4" podStartSLOduration=1.695065295 podStartE2EDuration="6.73543873s" podCreationTimestamp="2025-12-02 14:29:41 +0000 UTC" firstStartedPulling="2025-12-02 14:29:42.503048819 +0000 UTC m=+813.694357528" lastFinishedPulling="2025-12-02 14:29:47.543422254 +0000 UTC m=+818.734730963" observedRunningTime="2025-12-02 14:29:47.734607176 +0000 UTC m=+818.925915885" watchObservedRunningTime="2025-12-02 14:29:47.73543873 +0000 UTC m=+818.926747439"
Dec 02 14:29:51 crc kubenswrapper[4902]: I1202 14:29:51.988641 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-c2qjr"
Dec 02 14:29:52 crc kubenswrapper[4902]: I1202 14:29:52.345496 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7468d54cd-p8d4m"
Dec 02 14:29:52 crc kubenswrapper[4902]: I1202 14:29:52.345552 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7468d54cd-p8d4m"
Dec 02 14:29:52 crc kubenswrapper[4902]: I1202 14:29:52.350988 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7468d54cd-p8d4m"
Dec 02 14:29:52 crc kubenswrapper[4902]: I1202 14:29:52.750689 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7468d54cd-p8d4m"
Dec 02 14:29:52 crc kubenswrapper[4902]: I1202 14:29:52.820286 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"]
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.174701 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"]
Dec 02 14:30:00 crc kubenswrapper[4902]: E1202 14:30:00.175405 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="registry-server"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.175427 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="registry-server"
Dec 02 14:30:00 crc kubenswrapper[4902]: E1202 14:30:00.175452 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="extract-content"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.175463 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="extract-content"
Dec 02 14:30:00 crc kubenswrapper[4902]: E1202 14:30:00.175493 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="extract-utilities"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.175507 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="extract-utilities"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.175682 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd31dd7-18ae-4b49-8bef-8c997dd76ccb" containerName="registry-server"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.176291 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.178799 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.183239 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.189554 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"]
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.363030 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh4hp\" (UniqueName: \"kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.363168 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.363222 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.464155 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.464209 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.464254 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh4hp\" (UniqueName: \"kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.465344 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.468836 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.479716 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh4hp\" (UniqueName: \"kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp\") pod \"collect-profiles-29411430-dx2wb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.495839 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:00 crc kubenswrapper[4902]: W1202 14:30:00.713698 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6df3b0b6_51e8_426d_a1e5_b03611c256bb.slice/crio-868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915 WatchSource:0}: Error finding container 868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915: Status 404 returned error can't find the container with id 868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.719007 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"]
Dec 02 14:30:00 crc kubenswrapper[4902]: I1202 14:30:00.795547 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb" event={"ID":"6df3b0b6-51e8-426d-a1e5-b03611c256bb","Type":"ContainerStarted","Data":"868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915"}
Dec 02 14:30:01 crc kubenswrapper[4902]: I1202 14:30:01.804250 4902 generic.go:334] "Generic (PLEG): container finished" podID="6df3b0b6-51e8-426d-a1e5-b03611c256bb" containerID="0c2562be63138dd01b943189bdd0c76333b7ec8b843339f727a7a38defe258b7" exitCode=0
Dec 02 14:30:01 crc kubenswrapper[4902]: I1202 14:30:01.804528 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb" event={"ID":"6df3b0b6-51e8-426d-a1e5-b03611c256bb","Type":"ContainerDied","Data":"0c2562be63138dd01b943189bdd0c76333b7ec8b843339f727a7a38defe258b7"}
Dec 02 14:30:02 crc kubenswrapper[4902]: I1202 14:30:02.544046 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-8g56t"
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.050697 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.224552 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh4hp\" (UniqueName: \"kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp\") pod \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") "
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.224720 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume\") pod \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") "
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.224822 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume\") pod \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\" (UID: \"6df3b0b6-51e8-426d-a1e5-b03611c256bb\") "
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.226519 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume" (OuterVolumeSpecName: "config-volume") pod "6df3b0b6-51e8-426d-a1e5-b03611c256bb" (UID: "6df3b0b6-51e8-426d-a1e5-b03611c256bb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.231771 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6df3b0b6-51e8-426d-a1e5-b03611c256bb" (UID: "6df3b0b6-51e8-426d-a1e5-b03611c256bb"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.234590 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp" (OuterVolumeSpecName: "kube-api-access-bh4hp") pod "6df3b0b6-51e8-426d-a1e5-b03611c256bb" (UID: "6df3b0b6-51e8-426d-a1e5-b03611c256bb"). InnerVolumeSpecName "kube-api-access-bh4hp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.326810 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh4hp\" (UniqueName: \"kubernetes.io/projected/6df3b0b6-51e8-426d-a1e5-b03611c256bb-kube-api-access-bh4hp\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.326857 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df3b0b6-51e8-426d-a1e5-b03611c256bb-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.326869 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df3b0b6-51e8-426d-a1e5-b03611c256bb-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.817839 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb" event={"ID":"6df3b0b6-51e8-426d-a1e5-b03611c256bb","Type":"ContainerDied","Data":"868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915"}
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.817934 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"
Dec 02 14:30:03 crc kubenswrapper[4902]: I1202 14:30:03.817918 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="868dc52a0c164af7481b841c139c657e9443579e3d4436270499eedded126915"
Dec 02 14:30:04 crc kubenswrapper[4902]: I1202 14:30:04.732053 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:30:04 crc kubenswrapper[4902]: I1202 14:30:04.733502 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:30:17 crc kubenswrapper[4902]: I1202 14:30:17.902841 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8bcjv" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console" containerID="cri-o://ed5c0720557b37cc99e12d767b4387172f8bbc6edba471c9717c6f1c1cb17251" gracePeriod=15
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.906141 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8bcjv_e7263104-6357-4d41-a133-faf27fb96fd4/console/0.log"
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.906629 4902 generic.go:334] "Generic (PLEG): container finished" podID="e7263104-6357-4d41-a133-faf27fb96fd4" containerID="ed5c0720557b37cc99e12d767b4387172f8bbc6edba471c9717c6f1c1cb17251" exitCode=2
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.906657 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8bcjv" event={"ID":"e7263104-6357-4d41-a133-faf27fb96fd4","Type":"ContainerDied","Data":"ed5c0720557b37cc99e12d767b4387172f8bbc6edba471c9717c6f1c1cb17251"}
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.906690 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8bcjv" event={"ID":"e7263104-6357-4d41-a133-faf27fb96fd4","Type":"ContainerDied","Data":"fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9"}
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.906709 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fac84db31c77083b7325072325935d911e6484649a6f590cc94d4245d82531a9"
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.917714 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8bcjv_e7263104-6357-4d41-a133-faf27fb96fd4/console/0.log"
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.917770 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8bcjv"
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973300 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973352 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973386 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973418 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgmc\" (UniqueName: \"kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973446 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973506 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.973523 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config\") pod \"e7263104-6357-4d41-a133-faf27fb96fd4\" (UID: \"e7263104-6357-4d41-a133-faf27fb96fd4\") "
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.974463 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config" (OuterVolumeSpecName: "console-config") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.974474 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.974860 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.975174 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca" (OuterVolumeSpecName: "service-ca") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.979620 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc" (OuterVolumeSpecName: "kube-api-access-xcgmc") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "kube-api-access-xcgmc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.979631 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:30:18 crc kubenswrapper[4902]: I1202 14:30:18.980274 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "e7263104-6357-4d41-a133-faf27fb96fd4" (UID: "e7263104-6357-4d41-a133-faf27fb96fd4"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.075173 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgmc\" (UniqueName: \"kubernetes.io/projected/e7263104-6357-4d41-a133-faf27fb96fd4-kube-api-access-xcgmc\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.075776 4902 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.075838 4902 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-service-ca\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.075908 4902 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e7263104-6357-4d41-a133-faf27fb96fd4-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.075960 4902 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.076311 4902 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.076378 4902 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e7263104-6357-4d41-a133-faf27fb96fd4-console-config\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.912050 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8bcjv"
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.930375 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"]
Dec 02 14:30:19 crc kubenswrapper[4902]: I1202 14:30:19.938037 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8bcjv"]
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.113367 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" path="/var/lib/kubelet/pods/e7263104-6357-4d41-a133-faf27fb96fd4/volumes"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.513244 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"]
Dec 02 14:30:21 crc kubenswrapper[4902]: E1202 14:30:21.513466 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df3b0b6-51e8-426d-a1e5-b03611c256bb" containerName="collect-profiles"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.513479 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df3b0b6-51e8-426d-a1e5-b03611c256bb" containerName="collect-profiles"
Dec 02 14:30:21 crc kubenswrapper[4902]: E1202 14:30:21.513500 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.513506 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.513629 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6df3b0b6-51e8-426d-a1e5-b03611c256bb" containerName="collect-profiles"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.513644 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7263104-6357-4d41-a133-faf27fb96fd4" containerName="console"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.514368 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.517229 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.524049 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"]
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.606117 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9xr4\" (UniqueName: \"kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.606225 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.606298 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.707207 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.707334 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9xr4\" (UniqueName: \"kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.707397 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.708058 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.708275 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.739918 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9xr4\" (UniqueName: \"kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:21 crc kubenswrapper[4902]: I1202 14:30:21.828441 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:22 crc kubenswrapper[4902]: I1202 14:30:22.077257 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"]
Dec 02 14:30:22 crc kubenswrapper[4902]: I1202 14:30:22.936639 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerID="8e7b9f65a26759578f7c925eb82ffa77277efcd9acada72f5edc4783c3ca7ab4" exitCode=0
Dec 02 14:30:22 crc kubenswrapper[4902]: I1202 14:30:22.936677 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6" event={"ID":"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac","Type":"ContainerDied","Data":"8e7b9f65a26759578f7c925eb82ffa77277efcd9acada72f5edc4783c3ca7ab4"}
Dec 02 14:30:22 crc kubenswrapper[4902]: I1202 14:30:22.936721 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6" event={"ID":"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac","Type":"ContainerStarted","Data":"079a9aa85e1314d10d6e5e488796f91fedb10cbacbe6a28f19711be7ddb24fe8"}
Dec 02 14:30:27 crc kubenswrapper[4902]: I1202 14:30:27.967397 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerID="3973e2f9e223fad46ad974141fffb8740f15b58fccaa28146401506891f490e1" exitCode=0
Dec 02 14:30:27 crc kubenswrapper[4902]: I1202 14:30:27.967440 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6" event={"ID":"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac","Type":"ContainerDied","Data":"3973e2f9e223fad46ad974141fffb8740f15b58fccaa28146401506891f490e1"}
Dec 02 14:30:29 crc kubenswrapper[4902]: I1202 14:30:29.981423 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerID="cfc82f1cd0b93d6b5b757520fe2e4ea1b814b4692b3be075dac192701a275d12" exitCode=0
Dec 02 14:30:29 crc kubenswrapper[4902]: I1202 14:30:29.981498 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6" event={"ID":"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac","Type":"ContainerDied","Data":"cfc82f1cd0b93d6b5b757520fe2e4ea1b814b4692b3be075dac192701a275d12"}
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.248669 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.396327 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle\") pod \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") "
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.396463 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util\") pod \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") "
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.396600 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9xr4\" (UniqueName: \"kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4\") pod \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\" (UID: \"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac\") "
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.399385 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle" (OuterVolumeSpecName: "bundle") pod "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" (UID: "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.402403 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4" (OuterVolumeSpecName: "kube-api-access-r9xr4") pod "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" (UID: "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac"). InnerVolumeSpecName "kube-api-access-r9xr4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.406939 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util" (OuterVolumeSpecName: "util") pod "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" (UID: "b5fb7258-c7bd-4d20-b7a8-d636f970a8ac"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.501743 4902 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.501809 4902 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-util\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.501831 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9xr4\" (UniqueName: \"kubernetes.io/projected/b5fb7258-c7bd-4d20-b7a8-d636f970a8ac-kube-api-access-r9xr4\") on node \"crc\" DevicePath \"\""
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.995079 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6" event={"ID":"b5fb7258-c7bd-4d20-b7a8-d636f970a8ac","Type":"ContainerDied","Data":"079a9aa85e1314d10d6e5e488796f91fedb10cbacbe6a28f19711be7ddb24fe8"}
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.995135 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="079a9aa85e1314d10d6e5e488796f91fedb10cbacbe6a28f19711be7ddb24fe8"
Dec 02 14:30:31 crc kubenswrapper[4902]: I1202 14:30:31.995138 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6"
Dec 02 14:30:34 crc kubenswrapper[4902]: I1202 14:30:34.731267 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:30:34 crc kubenswrapper[4902]: I1202 14:30:34.731662 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:30:34 crc kubenswrapper[4902]: I1202 14:30:34.731934 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 14:30:34 crc kubenswrapper[4902]: I1202 14:30:34.732692 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 14:30:34 crc kubenswrapper[4902]: I1202 14:30:34.732772 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc" gracePeriod=600
Dec 02 14:30:36 crc kubenswrapper[4902]: I1202 14:30:36.022181 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc" exitCode=0
Dec 02 14:30:36 crc kubenswrapper[4902]: I1202 14:30:36.022240 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc"}
Dec 02 14:30:36 crc kubenswrapper[4902]: I1202 14:30:36.022510 4902 scope.go:117] "RemoveContainer" containerID="118552f466a40a8de3c430e2d713a73773e397381466b0fec04a29e1d45b39b2"
Dec 02 14:30:37 crc kubenswrapper[4902]: I1202 14:30:37.032394 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33"}
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.115062 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"]
Dec 02 14:30:41 crc kubenswrapper[4902]: E1202 14:30:41.115749 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="util"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.115762 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="util"
Dec 02 14:30:41 crc kubenswrapper[4902]: E1202 14:30:41.115775 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="extract"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.115781 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="extract"
Dec 02 14:30:41 crc kubenswrapper[4902]: E1202 14:30:41.115793 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="pull"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.115800 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="pull"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.115901 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fb7258-c7bd-4d20-b7a8-d636f970a8ac" containerName="extract"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.116293 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.119439 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.119696 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.120029 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-4pzk2"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.120169 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.120880 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.130467 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"]
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.245531 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4l45\" (UniqueName: \"kubernetes.io/projected/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-kube-api-access-b4l45\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.245602 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-webhook-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.245638 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-apiservice-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.347584 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4l45\" (UniqueName: \"kubernetes.io/projected/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-kube-api-access-b4l45\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.347655 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-webhook-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.347689 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-apiservice-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.354000 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-apiservice-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.354355 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-webhook-cert\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.372241 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4l45\" (UniqueName: \"kubernetes.io/projected/b75cd83c-20e7-42ea-a0f5-be6d28430a2e-kube-api-access-b4l45\") pod \"metallb-operator-controller-manager-644fb8ffcc-frgm7\" (UID: \"b75cd83c-20e7-42ea-a0f5-be6d28430a2e\") " pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.435447 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.437449 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"]
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.438120 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.440923 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.441036 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.441247 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-g488v"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.448713 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-webhook-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.448871 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfwl2\" (UniqueName: \"kubernetes.io/projected/25920d0f-ef42-4402-82a1-c643307041f5-kube-api-access-lfwl2\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.448904 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-apiservice-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.456906 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"]
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.550052 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-webhook-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.550349 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfwl2\" (UniqueName: \"kubernetes.io/projected/25920d0f-ef42-4402-82a1-c643307041f5-kube-api-access-lfwl2\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.550371 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-apiservice-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.555155 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-apiservice-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.560879 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25920d0f-ef42-4402-82a1-c643307041f5-webhook-cert\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.568962 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfwl2\" (UniqueName: \"kubernetes.io/projected/25920d0f-ef42-4402-82a1-c643307041f5-kube-api-access-lfwl2\") pod \"metallb-operator-webhook-server-56874848dd-bf6qw\" (UID: \"25920d0f-ef42-4402-82a1-c643307041f5\") " pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.689950 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"]
Dec 02 14:30:41 crc kubenswrapper[4902]: I1202 14:30:41.796651 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:42 crc kubenswrapper[4902]: I1202 14:30:42.041667 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"]
Dec 02 14:30:42 crc kubenswrapper[4902]: W1202 14:30:42.049868 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25920d0f_ef42_4402_82a1_c643307041f5.slice/crio-2b9ab8f7de362750f929e8d98f3985639a47414d68c25390a15b5c5aaf05779b WatchSource:0}: Error finding container 2b9ab8f7de362750f929e8d98f3985639a47414d68c25390a15b5c5aaf05779b: Status 404 returned error can't find the container with id 2b9ab8f7de362750f929e8d98f3985639a47414d68c25390a15b5c5aaf05779b
Dec 02 14:30:42 crc kubenswrapper[4902]: I1202 14:30:42.072801 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw" event={"ID":"25920d0f-ef42-4402-82a1-c643307041f5","Type":"ContainerStarted","Data":"2b9ab8f7de362750f929e8d98f3985639a47414d68c25390a15b5c5aaf05779b"}
Dec 02 14:30:42 crc kubenswrapper[4902]: I1202 14:30:42.074011 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7" event={"ID":"b75cd83c-20e7-42ea-a0f5-be6d28430a2e","Type":"ContainerStarted","Data":"9625ef399dd2314258e3c01b20e296e10fb8efa4c71fb81e33a3974094173a4c"}
Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.113327 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw"
Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.113944 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7"
Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.113961 4902 kubelet.go:2453]
"SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw" event={"ID":"25920d0f-ef42-4402-82a1-c643307041f5","Type":"ContainerStarted","Data":"1c36b39c281636f32784c597954de40cfbceb0a37d5cc6f9ddb9d48f7d6edee2"} Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.114004 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7" event={"ID":"b75cd83c-20e7-42ea-a0f5-be6d28430a2e","Type":"ContainerStarted","Data":"114e082b1267574cd2b5bc45861b7f98cae134ccdf8592576a7d0cf2bd2c6b58"} Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.128375 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw" podStartSLOduration=1.273744204 podStartE2EDuration="6.128356661s" podCreationTimestamp="2025-12-02 14:30:41 +0000 UTC" firstStartedPulling="2025-12-02 14:30:42.054775682 +0000 UTC m=+873.246084391" lastFinishedPulling="2025-12-02 14:30:46.909388139 +0000 UTC m=+878.100696848" observedRunningTime="2025-12-02 14:30:47.12477989 +0000 UTC m=+878.316088599" watchObservedRunningTime="2025-12-02 14:30:47.128356661 +0000 UTC m=+878.319665370" Dec 02 14:30:47 crc kubenswrapper[4902]: I1202 14:30:47.153404 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7" podStartSLOduration=0.966554545 podStartE2EDuration="6.153380632s" podCreationTimestamp="2025-12-02 14:30:41 +0000 UTC" firstStartedPulling="2025-12-02 14:30:41.700244478 +0000 UTC m=+872.891553187" lastFinishedPulling="2025-12-02 14:30:46.887070565 +0000 UTC m=+878.078379274" observedRunningTime="2025-12-02 14:30:47.148914315 +0000 UTC m=+878.340223034" watchObservedRunningTime="2025-12-02 14:30:47.153380632 +0000 UTC m=+878.344689361" Dec 02 14:31:01 crc kubenswrapper[4902]: I1202 14:31:01.803180 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-56874848dd-bf6qw" Dec 02 14:31:09 crc kubenswrapper[4902]: I1202 14:31:09.955106 4902 scope.go:117] "RemoveContainer" containerID="ed5c0720557b37cc99e12d767b4387172f8bbc6edba471c9717c6f1c1cb17251" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.005087 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"] Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.007511 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.077614 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"] Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.170118 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnrrl\" (UniqueName: \"kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.170173 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.170208 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.271009 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.271138 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.271406 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnrrl\" (UniqueName: \"kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.271532 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.271648 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.291166 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xnrrl\" (UniqueName: \"kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl\") pod \"certified-operators-cqxsx\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") " pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.411987 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:11 crc kubenswrapper[4902]: I1202 14:31:11.844891 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"] Dec 02 14:31:12 crc kubenswrapper[4902]: I1202 14:31:12.299198 4902 generic.go:334] "Generic (PLEG): container finished" podID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerID="5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5" exitCode=0 Dec 02 14:31:12 crc kubenswrapper[4902]: I1202 14:31:12.299280 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerDied","Data":"5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5"} Dec 02 14:31:12 crc kubenswrapper[4902]: I1202 14:31:12.299490 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerStarted","Data":"1f2f7bc4d30c4d418a03b930a0dd614458477216d01a933f2f0b742b8f3190a0"} Dec 02 14:31:14 crc kubenswrapper[4902]: I1202 14:31:14.312478 4902 generic.go:334] "Generic (PLEG): container finished" podID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerID="aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f" exitCode=0 Dec 02 14:31:14 crc kubenswrapper[4902]: I1202 14:31:14.312532 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerDied","Data":"aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f"} Dec 02 14:31:16 crc kubenswrapper[4902]: I1202 14:31:16.338590 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerStarted","Data":"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"} Dec 02 14:31:16 crc kubenswrapper[4902]: I1202 14:31:16.356017 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cqxsx" podStartSLOduration=3.360906033 podStartE2EDuration="6.355998435s" podCreationTimestamp="2025-12-02 14:31:10 +0000 UTC" firstStartedPulling="2025-12-02 14:31:12.301136301 +0000 UTC m=+903.492445050" lastFinishedPulling="2025-12-02 14:31:15.296228703 +0000 UTC m=+906.487537452" observedRunningTime="2025-12-02 14:31:16.355764598 +0000 UTC m=+907.547073317" watchObservedRunningTime="2025-12-02 14:31:16.355998435 +0000 UTC m=+907.547307144" Dec 02 14:31:21 crc kubenswrapper[4902]: I1202 14:31:21.412259 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:21 crc kubenswrapper[4902]: I1202 14:31:21.412829 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:21 crc kubenswrapper[4902]: I1202 14:31:21.440134 4902 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-644fb8ffcc-frgm7" Dec 02 14:31:21 crc kubenswrapper[4902]: I1202 14:31:21.508509 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.355473 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2rnt6"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.358710 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.361061 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-66wdj" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.361232 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.363578 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.365131 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.366098 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.372595 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.373345 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.427255 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cqxsx" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.455518 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-rk7ql"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.456737 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rk7ql" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457171 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-conf\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457246 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74c5d\" (UniqueName: \"kubernetes.io/projected/b4b0b031-e720-4ff3-9c48-545f116b5473-kube-api-access-74c5d\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457354 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics-certs\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457425 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wlmd\" (UniqueName: \"kubernetes.io/projected/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-kube-api-access-7wlmd\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457470 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-startup\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457525 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457551 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-sockets\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.457581 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-reloader\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " 
pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.461858 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.461931 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.462013 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.465837 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-n87xm" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.472044 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-fvt5m"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.474680 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-fvt5m" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.481030 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.510587 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-fvt5m"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.525164 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"] Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.558980 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-metrics-certs\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559022 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp4w5\" (UniqueName: \"kubernetes.io/projected/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-kube-api-access-kp4w5\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559061 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics-certs\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559209 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-cert\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m" Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559274 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wlmd\" (UniqueName: \"kubernetes.io/projected/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-kube-api-access-7wlmd\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" 
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559329 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-startup\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559356 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559399 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-sockets\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559418 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-reloader\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559460 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metrics-certs\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559486 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559506 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nc9l\" (UniqueName: \"kubernetes.io/projected/673f9521-fb68-4ec8-9190-cf0315b14280-kube-api-access-8nc9l\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559526 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-conf\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559628 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metallb-excludel2\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559705 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74c5d\" (UniqueName: \"kubernetes.io/projected/b4b0b031-e720-4ff3-9c48-545f116b5473-kube-api-access-74c5d\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.559739 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.560010 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-sockets\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.560018 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.560220 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-conf\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.560382 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b4b0b031-e720-4ff3-9c48-545f116b5473-frr-startup\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.563875 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b4b0b031-e720-4ff3-9c48-545f116b5473-reloader\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.567815 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.578254 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4b0b031-e720-4ff3-9c48-545f116b5473-metrics-certs\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.580701 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wlmd\" (UniqueName: \"kubernetes.io/projected/e15d73a9-a10f-40b9-8a07-ae14a383d2ba-kube-api-access-7wlmd\") pod \"frr-k8s-webhook-server-7fcb986d4-p7rgx\" (UID: \"e15d73a9-a10f-40b9-8a07-ae14a383d2ba\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.581456 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74c5d\" (UniqueName: \"kubernetes.io/projected/b4b0b031-e720-4ff3-9c48-545f116b5473-kube-api-access-74c5d\") pod \"frr-k8s-2rnt6\" (UID: \"b4b0b031-e720-4ff3-9c48-545f116b5473\") " pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.660977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-cert\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661057 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metrics-certs\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661082 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nc9l\" (UniqueName: \"kubernetes.io/projected/673f9521-fb68-4ec8-9190-cf0315b14280-kube-api-access-8nc9l\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661133 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metallb-excludel2\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661179 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661233 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-metrics-certs\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.661264 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp4w5\" (UniqueName: \"kubernetes.io/projected/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-kube-api-access-kp4w5\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: E1202 14:31:22.661316 4902 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 02 14:31:22 crc kubenswrapper[4902]: E1202 14:31:22.661411 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist podName:4650d7ad-6b89-444d-a21d-3fad55e3a7b1 nodeName:}" failed. No retries permitted until 2025-12-02 14:31:23.161388185 +0000 UTC m=+914.352696954 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist") pod "speaker-rk7ql" (UID: "4650d7ad-6b89-444d-a21d-3fad55e3a7b1") : secret "metallb-memberlist" not found
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.662129 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metallb-excludel2\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.663009 4902 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.664576 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-metrics-certs\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.665317 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-metrics-certs\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.674175 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/673f9521-fb68-4ec8-9190-cf0315b14280-cert\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.678161 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp4w5\" (UniqueName: \"kubernetes.io/projected/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-kube-api-access-kp4w5\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.680032 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2rnt6"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.680307 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nc9l\" (UniqueName: \"kubernetes.io/projected/673f9521-fb68-4ec8-9190-cf0315b14280-kube-api-access-8nc9l\") pod \"controller-f8648f98b-fvt5m\" (UID: \"673f9521-fb68-4ec8-9190-cf0315b14280\") " pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.688263 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.824102 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:22 crc kubenswrapper[4902]: I1202 14:31:22.948905 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx"]
Dec 02 14:31:22 crc kubenswrapper[4902]: W1202 14:31:22.953891 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode15d73a9_a10f_40b9_8a07_ae14a383d2ba.slice/crio-4cefbc08950617e87683b2a47aa89a73f021772c9ee8bc9b27e4fea1236b5fa1 WatchSource:0}: Error finding container 4cefbc08950617e87683b2a47aa89a73f021772c9ee8bc9b27e4fea1236b5fa1: Status 404 returned error can't find the container with id 4cefbc08950617e87683b2a47aa89a73f021772c9ee8bc9b27e4fea1236b5fa1
Dec 02 14:31:23 crc kubenswrapper[4902]: I1202 14:31:23.168920 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:23 crc kubenswrapper[4902]: E1202 14:31:23.169137 4902 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 02 14:31:23 crc kubenswrapper[4902]: E1202 14:31:23.169228 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist podName:4650d7ad-6b89-444d-a21d-3fad55e3a7b1 nodeName:}" failed. No retries permitted until 2025-12-02 14:31:24.16920679 +0000 UTC m=+915.360515509 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist") pod "speaker-rk7ql" (UID: "4650d7ad-6b89-444d-a21d-3fad55e3a7b1") : secret "metallb-memberlist" not found
Dec 02 14:31:23 crc kubenswrapper[4902]: I1202 14:31:23.256236 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-fvt5m"]
Dec 02 14:31:23 crc kubenswrapper[4902]: I1202 14:31:23.386840 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fvt5m" event={"ID":"673f9521-fb68-4ec8-9190-cf0315b14280","Type":"ContainerStarted","Data":"078d3b927a70b03e748b22add444c043b1e1ceba2bcbf32278b6bb0dc403ec33"}
Dec 02 14:31:23 crc kubenswrapper[4902]: I1202 14:31:23.387868 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" event={"ID":"e15d73a9-a10f-40b9-8a07-ae14a383d2ba","Type":"ContainerStarted","Data":"4cefbc08950617e87683b2a47aa89a73f021772c9ee8bc9b27e4fea1236b5fa1"}
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.181649 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.189334 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4650d7ad-6b89-444d-a21d-3fad55e3a7b1-memberlist\") pod \"speaker-rk7ql\" (UID: \"4650d7ad-6b89-444d-a21d-3fad55e3a7b1\") " pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.311425 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:24 crc kubenswrapper[4902]: W1202 14:31:24.333097 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4650d7ad_6b89_444d_a21d_3fad55e3a7b1.slice/crio-0ca8672af03b0b7c8fef5acad0aef5af5b12adcea46a10eed08e509259dec392 WatchSource:0}: Error finding container 0ca8672af03b0b7c8fef5acad0aef5af5b12adcea46a10eed08e509259dec392: Status 404 returned error can't find the container with id 0ca8672af03b0b7c8fef5acad0aef5af5b12adcea46a10eed08e509259dec392
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.402232 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rk7ql" event={"ID":"4650d7ad-6b89-444d-a21d-3fad55e3a7b1","Type":"ContainerStarted","Data":"0ca8672af03b0b7c8fef5acad0aef5af5b12adcea46a10eed08e509259dec392"}
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.410097 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"756c947c9fd146c878760ce6205593e8082115eec1a17f0df2ec4d1d7d734608"}
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.412019 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fvt5m" event={"ID":"673f9521-fb68-4ec8-9190-cf0315b14280","Type":"ContainerStarted","Data":"6d9acceaa59c652ec860600af5dcac3a7498e87f0976a5d2509976fd333a4bc8"}
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.412089 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fvt5m" event={"ID":"673f9521-fb68-4ec8-9190-cf0315b14280","Type":"ContainerStarted","Data":"fcc67dd20eba8bc5bc7e7250cb4f723f4ef8a77ce785ed42f775e68ee23cc454"}
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.412246 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cqxsx" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="registry-server" containerID="cri-o://b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83" gracePeriod=2
Dec 02 14:31:24 crc kubenswrapper[4902]: I1202 14:31:24.439112 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-fvt5m" podStartSLOduration=2.439094561 podStartE2EDuration="2.439094561s" podCreationTimestamp="2025-12-02 14:31:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:31:24.432878624 +0000 UTC m=+915.624187343" watchObservedRunningTime="2025-12-02 14:31:24.439094561 +0000 UTC m=+915.630403270"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.315651 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cqxsx"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.396672 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnrrl\" (UniqueName: \"kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl\") pod \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") "
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.396755 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities\") pod \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") "
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.396837 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content\") pod \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\" (UID: \"ba269d25-2abd-4b6a-9e4f-5d3ad207f749\") "
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.397791 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities" (OuterVolumeSpecName: "utilities") pod "ba269d25-2abd-4b6a-9e4f-5d3ad207f749" (UID: "ba269d25-2abd-4b6a-9e4f-5d3ad207f749"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.420532 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl" (OuterVolumeSpecName: "kube-api-access-xnrrl") pod "ba269d25-2abd-4b6a-9e4f-5d3ad207f749" (UID: "ba269d25-2abd-4b6a-9e4f-5d3ad207f749"). InnerVolumeSpecName "kube-api-access-xnrrl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.427441 4902 generic.go:334] "Generic (PLEG): container finished" podID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerID="b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83" exitCode=0
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.427513 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerDied","Data":"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"}
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.427545 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cqxsx" event={"ID":"ba269d25-2abd-4b6a-9e4f-5d3ad207f749","Type":"ContainerDied","Data":"1f2f7bc4d30c4d418a03b930a0dd614458477216d01a933f2f0b742b8f3190a0"}
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.427600 4902 scope.go:117] "RemoveContainer" containerID="b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.427737 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cqxsx"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.431632 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rk7ql" event={"ID":"4650d7ad-6b89-444d-a21d-3fad55e3a7b1","Type":"ContainerStarted","Data":"029681ee3162e3f0124712b8cf2d34b2a895ac1eba8c4edb6be6eca5e47ad64f"}
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.431677 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rk7ql" event={"ID":"4650d7ad-6b89-444d-a21d-3fad55e3a7b1","Type":"ContainerStarted","Data":"4e00a8b0d2ea86c0c1a3ca5d35ded2afce05be10596f02322c63ef4c5d8ee4ef"}
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.431702 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-fvt5m"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.431716 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-rk7ql"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.449775 4902 scope.go:117] "RemoveContainer" containerID="aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.460303 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-rk7ql" podStartSLOduration=3.4602795410000002 podStartE2EDuration="3.460279541s" podCreationTimestamp="2025-12-02 14:31:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:31:25.459943032 +0000 UTC m=+916.651251741" watchObservedRunningTime="2025-12-02 14:31:25.460279541 +0000 UTC m=+916.651588280"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.466273 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba269d25-2abd-4b6a-9e4f-5d3ad207f749" (UID: "ba269d25-2abd-4b6a-9e4f-5d3ad207f749"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.472581 4902 scope.go:117] "RemoveContainer" containerID="5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.498410 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.498446 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.498458 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnrrl\" (UniqueName: \"kubernetes.io/projected/ba269d25-2abd-4b6a-9e4f-5d3ad207f749-kube-api-access-xnrrl\") on node \"crc\" DevicePath \"\""
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.504213 4902 scope.go:117] "RemoveContainer" containerID="b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"
Dec 02 14:31:25 crc kubenswrapper[4902]: E1202 14:31:25.504830 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83\": container with ID starting with b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83 not found: ID does not exist" containerID="b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.504870 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83"} err="failed to get container status \"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83\": rpc error: code = NotFound desc = could not find container \"b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83\": container with ID starting with b352ca4379751bd3bf5ebdde1893ebce3a5a1bce19b8a4987ad14bad7277cb83 not found: ID does not exist"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.504897 4902 scope.go:117] "RemoveContainer" containerID="aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f"
Dec 02 14:31:25 crc kubenswrapper[4902]: E1202 14:31:25.509008 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f\": container with ID starting with aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f not found: ID does not exist" containerID="aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.509048 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f"} err="failed to get container status \"aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f\": rpc error: code = NotFound desc = could not find container \"aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f\": container with ID starting with aed38b27f95e869530a0ef42514c22b6295aabc41b92ad02ba711524029a449f not found: ID does not exist"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.509089 4902 scope.go:117] "RemoveContainer" containerID="5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5"
Dec 02 14:31:25 crc kubenswrapper[4902]: E1202 14:31:25.509600 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5\": container with ID starting with 5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5 not found: ID does not exist" containerID="5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.509630 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5"} err="failed to get container status \"5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5\": rpc error: code = NotFound desc = could not find container \"5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5\": container with ID starting with 5ce8edfe633d24f8f015da23d6e666562370ee45ac6d86a46e0a2b98074872f5 not found: ID does not exist"
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.760462 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"]
Dec 02 14:31:25 crc kubenswrapper[4902]: I1202 14:31:25.764239 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cqxsx"]
Dec 02 14:31:27 crc kubenswrapper[4902]: I1202 14:31:27.116006 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" path="/var/lib/kubelet/pods/ba269d25-2abd-4b6a-9e4f-5d3ad207f749/volumes"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.162226 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"]
Dec 02 14:31:30 crc kubenswrapper[4902]: E1202 14:31:30.163133 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="registry-server"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.163149 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="registry-server"
Dec 02 14:31:30 crc kubenswrapper[4902]: E1202 14:31:30.163173 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="extract-content"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.163182 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="extract-content"
Dec 02 14:31:30 crc kubenswrapper[4902]: E1202 14:31:30.163195 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="extract-utilities"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.163204 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="extract-utilities"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.163341 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba269d25-2abd-4b6a-9e4f-5d3ad207f749" containerName="registry-server"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.164441 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.168013 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"]
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.313177 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.313241 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-catalog-content\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.313272 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25frx\" (UniqueName: \"kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.415625 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25frx\" (UniqueName: \"kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.415817 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.415854 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-catalog-content\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.419171 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.419269 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-catalog-content\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.466783 4902 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-25frx\" (UniqueName: \"kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx\") pod \"redhat-marketplace-vcvq6\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") " pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:30 crc kubenswrapper[4902]: I1202 14:31:30.488397 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.026468 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"] Dec 02 14:31:31 crc kubenswrapper[4902]: W1202 14:31:31.031306 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fb0bbc5_03f0_4d35_9727_f26bd725e139.slice/crio-0d1db71dff51303806787c0325636d3ae7840a49a85a697271fd58f4299d327d WatchSource:0}: Error finding container 0d1db71dff51303806787c0325636d3ae7840a49a85a697271fd58f4299d327d: Status 404 returned error can't find the container with id 0d1db71dff51303806787c0325636d3ae7840a49a85a697271fd58f4299d327d Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.475586 4902 generic.go:334] "Generic (PLEG): container finished" podID="b4b0b031-e720-4ff3-9c48-545f116b5473" containerID="90efbd3d0d0d67bc63778252092fb5c9504ec687e4ffb1babb0623b5ea9ea7a7" exitCode=0 Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.475681 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerDied","Data":"90efbd3d0d0d67bc63778252092fb5c9504ec687e4ffb1babb0623b5ea9ea7a7"} Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.479897 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" event={"ID":"e15d73a9-a10f-40b9-8a07-ae14a383d2ba","Type":"ContainerStarted","Data":"592904f66b6950519b36f174b7155bc4cac10447de376efbd2d70fb821b457f7"} Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.480031 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.482152 4902 generic.go:334] "Generic (PLEG): container finished" podID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerID="e3d89ef94bd82208371463e7e92f5f43f0b84d248da036bb079ce069f4553018" exitCode=0 Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.482198 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerDied","Data":"e3d89ef94bd82208371463e7e92f5f43f0b84d248da036bb079ce069f4553018"} Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.482226 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerStarted","Data":"0d1db71dff51303806787c0325636d3ae7840a49a85a697271fd58f4299d327d"} Dec 02 14:31:31 crc kubenswrapper[4902]: I1202 14:31:31.571078 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" podStartSLOduration=2.065674735 podStartE2EDuration="9.571049467s" podCreationTimestamp="2025-12-02 14:31:22 +0000 UTC" firstStartedPulling="2025-12-02 14:31:22.955802653 +0000 UTC 
m=+914.147111362" lastFinishedPulling="2025-12-02 14:31:30.461177375 +0000 UTC m=+921.652486094" observedRunningTime="2025-12-02 14:31:31.562332539 +0000 UTC m=+922.753641248" watchObservedRunningTime="2025-12-02 14:31:31.571049467 +0000 UTC m=+922.762358216" Dec 02 14:31:31 crc kubenswrapper[4902]: E1202 14:31:31.713866 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4b0b031_e720_4ff3_9c48_545f116b5473.slice/crio-conmon-83fe67aceda2fad9b3f2e83a8f389baeec73c00dffaf4af6b445f7c10b3f256d.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:31:32 crc kubenswrapper[4902]: I1202 14:31:32.494504 4902 generic.go:334] "Generic (PLEG): container finished" podID="b4b0b031-e720-4ff3-9c48-545f116b5473" containerID="83fe67aceda2fad9b3f2e83a8f389baeec73c00dffaf4af6b445f7c10b3f256d" exitCode=0 Dec 02 14:31:32 crc kubenswrapper[4902]: I1202 14:31:32.494644 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerDied","Data":"83fe67aceda2fad9b3f2e83a8f389baeec73c00dffaf4af6b445f7c10b3f256d"} Dec 02 14:31:33 crc kubenswrapper[4902]: I1202 14:31:33.505293 4902 generic.go:334] "Generic (PLEG): container finished" podID="b4b0b031-e720-4ff3-9c48-545f116b5473" containerID="5d5e4c761c83a67a9b6138a29f27d8ecb460d7e10feadf7660cbbb42e228cca9" exitCode=0 Dec 02 14:31:33 crc kubenswrapper[4902]: I1202 14:31:33.505409 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerDied","Data":"5d5e4c761c83a67a9b6138a29f27d8ecb460d7e10feadf7660cbbb42e228cca9"} Dec 02 14:31:33 crc kubenswrapper[4902]: I1202 14:31:33.512164 4902 generic.go:334] "Generic (PLEG): container finished" podID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerID="5a566bf3658903add7142268c5b3e349ac12892ef76ba3891420c96516ce1eb9" exitCode=0 Dec 02 14:31:33 crc kubenswrapper[4902]: I1202 14:31:33.512280 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerDied","Data":"5a566bf3658903add7142268c5b3e349ac12892ef76ba3891420c96516ce1eb9"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.315954 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-rk7ql" Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.525415 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"3ba17e2bcaa0e889fa22d53b99f8fc03cd545f32a55eafb01ca7d858d0479f70"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.525450 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"3e9321df50a6fb1f721719eb1d23f7245817a2cce8ba6e1204b287654a2c199a"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.525462 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"6c26317779bfa738db494773c02ff6ac04b7e2c7842f524a24b0f7f8ee43cad5"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.525470 4902 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"cc6ee8503ccd3d4b5d8e25802ec339f602f5cf3cb1ac7b27f68aa68dd322b952"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.525479 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"49ed2527d977432da7574a9390604c37cca224bf14d99aa43e3bb723b284eac0"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.527707 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerStarted","Data":"6073938fd4436fdf8718a7258d0a5db22e0525dce90ae70b4efef05a198e920d"} Dec 02 14:31:34 crc kubenswrapper[4902]: I1202 14:31:34.561818 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vcvq6" podStartSLOduration=2.089142745 podStartE2EDuration="4.561799408s" podCreationTimestamp="2025-12-02 14:31:30 +0000 UTC" firstStartedPulling="2025-12-02 14:31:31.483755115 +0000 UTC m=+922.675063834" lastFinishedPulling="2025-12-02 14:31:33.956411788 +0000 UTC m=+925.147720497" observedRunningTime="2025-12-02 14:31:34.5580061 +0000 UTC m=+925.749314839" watchObservedRunningTime="2025-12-02 14:31:34.561799408 +0000 UTC m=+925.753108117" Dec 02 14:31:35 crc kubenswrapper[4902]: I1202 14:31:35.543542 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2rnt6" event={"ID":"b4b0b031-e720-4ff3-9c48-545f116b5473","Type":"ContainerStarted","Data":"fec9337380b97e4aa1ec11ff4a4279ac0a4585faf9e8a35b551af34442d7d83b"} Dec 02 14:31:35 crc kubenswrapper[4902]: I1202 14:31:35.583785 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2rnt6" podStartSLOduration=6.913845898 podStartE2EDuration="13.58375261s" podCreationTimestamp="2025-12-02 14:31:22 +0000 UTC" firstStartedPulling="2025-12-02 14:31:23.796516413 +0000 UTC m=+914.987825132" lastFinishedPulling="2025-12-02 14:31:30.466423125 +0000 UTC m=+921.657731844" observedRunningTime="2025-12-02 14:31:35.571110001 +0000 UTC m=+926.762418720" watchObservedRunningTime="2025-12-02 14:31:35.58375261 +0000 UTC m=+926.775061359" Dec 02 14:31:36 crc kubenswrapper[4902]: I1202 14:31:36.551452 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.271478 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.272953 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.276152 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.280284 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-sdvw6" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.280692 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.285168 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.406634 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ngp4\" (UniqueName: \"kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4\") pod \"openstack-operator-index-mprvr\" (UID: \"07d385c8-daa7-4456-ad07-a1f2c296e8e0\") " pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.508280 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ngp4\" (UniqueName: \"kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4\") pod \"openstack-operator-index-mprvr\" (UID: \"07d385c8-daa7-4456-ad07-a1f2c296e8e0\") " pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.529582 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ngp4\" (UniqueName: \"kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4\") pod \"openstack-operator-index-mprvr\" (UID: \"07d385c8-daa7-4456-ad07-a1f2c296e8e0\") " pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.601631 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.681249 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.754763 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:37 crc kubenswrapper[4902]: I1202 14:31:37.866875 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:38 crc kubenswrapper[4902]: I1202 14:31:38.573214 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mprvr" event={"ID":"07d385c8-daa7-4456-ad07-a1f2c296e8e0","Type":"ContainerStarted","Data":"676ec7dc6a5fd6e5db48b957e4e7357f89ae338819a6eb71e3e7d00d674fd4ae"} Dec 02 14:31:40 crc kubenswrapper[4902]: I1202 14:31:40.488378 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:40 crc kubenswrapper[4902]: I1202 14:31:40.488656 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:40 crc kubenswrapper[4902]: I1202 14:31:40.535727 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:40 crc kubenswrapper[4902]: I1202 14:31:40.536369 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:40 crc kubenswrapper[4902]: I1202 14:31:40.627953 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.147034 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jw4ms"] Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.148237 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.151748 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jw4ms"] Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.264998 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r54gr\" (UniqueName: \"kubernetes.io/projected/5a2a1e6b-0712-4b08-8dca-36da17c57d51-kube-api-access-r54gr\") pod \"openstack-operator-index-jw4ms\" (UID: \"5a2a1e6b-0712-4b08-8dca-36da17c57d51\") " pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.366168 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r54gr\" (UniqueName: \"kubernetes.io/projected/5a2a1e6b-0712-4b08-8dca-36da17c57d51-kube-api-access-r54gr\") pod \"openstack-operator-index-jw4ms\" (UID: \"5a2a1e6b-0712-4b08-8dca-36da17c57d51\") " pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.404041 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r54gr\" (UniqueName: \"kubernetes.io/projected/5a2a1e6b-0712-4b08-8dca-36da17c57d51-kube-api-access-r54gr\") pod \"openstack-operator-index-jw4ms\" (UID: \"5a2a1e6b-0712-4b08-8dca-36da17c57d51\") " pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.511993 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:41 crc kubenswrapper[4902]: I1202 14:31:41.762209 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jw4ms"] Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.603554 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jw4ms" event={"ID":"5a2a1e6b-0712-4b08-8dca-36da17c57d51","Type":"ContainerStarted","Data":"6bd7f6cc617c7b4b3cbd3c53ff5acc806c8bb31f4e8a5d00e29d4763e298efea"} Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.604023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jw4ms" event={"ID":"5a2a1e6b-0712-4b08-8dca-36da17c57d51","Type":"ContainerStarted","Data":"3f82b9f334300f0b46027485c5adc4f428aa2c61191f615e97eb06b9ed9159ed"} Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.608819 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mprvr" event={"ID":"07d385c8-daa7-4456-ad07-a1f2c296e8e0","Type":"ContainerStarted","Data":"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4"} Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.608949 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-mprvr" podUID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" containerName="registry-server" containerID="cri-o://57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4" gracePeriod=2 Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.630698 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jw4ms" podStartSLOduration=1.261912546 podStartE2EDuration="1.630668889s" podCreationTimestamp="2025-12-02 14:31:41 +0000 
UTC" firstStartedPulling="2025-12-02 14:31:41.768937441 +0000 UTC m=+932.960246150" lastFinishedPulling="2025-12-02 14:31:42.137693784 +0000 UTC m=+933.329002493" observedRunningTime="2025-12-02 14:31:42.625963045 +0000 UTC m=+933.817271764" watchObservedRunningTime="2025-12-02 14:31:42.630668889 +0000 UTC m=+933.821977628" Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.696318 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p7rgx" Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.718750 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mprvr" podStartSLOduration=2.594894579 podStartE2EDuration="5.718730952s" podCreationTimestamp="2025-12-02 14:31:37 +0000 UTC" firstStartedPulling="2025-12-02 14:31:37.872744091 +0000 UTC m=+929.064052800" lastFinishedPulling="2025-12-02 14:31:40.996580444 +0000 UTC m=+932.187889173" observedRunningTime="2025-12-02 14:31:42.652636953 +0000 UTC m=+933.843945682" watchObservedRunningTime="2025-12-02 14:31:42.718730952 +0000 UTC m=+933.910039671" Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.833962 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-fvt5m" Dec 02 14:31:42 crc kubenswrapper[4902]: I1202 14:31:42.990982 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.093015 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ngp4\" (UniqueName: \"kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4\") pod \"07d385c8-daa7-4456-ad07-a1f2c296e8e0\" (UID: \"07d385c8-daa7-4456-ad07-a1f2c296e8e0\") " Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.098759 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4" (OuterVolumeSpecName: "kube-api-access-7ngp4") pod "07d385c8-daa7-4456-ad07-a1f2c296e8e0" (UID: "07d385c8-daa7-4456-ad07-a1f2c296e8e0"). InnerVolumeSpecName "kube-api-access-7ngp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.195681 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ngp4\" (UniqueName: \"kubernetes.io/projected/07d385c8-daa7-4456-ad07-a1f2c296e8e0-kube-api-access-7ngp4\") on node \"crc\" DevicePath \"\"" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.626551 4902 generic.go:334] "Generic (PLEG): container finished" podID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" containerID="57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4" exitCode=0 Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.626692 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mprvr" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.626805 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mprvr" event={"ID":"07d385c8-daa7-4456-ad07-a1f2c296e8e0","Type":"ContainerDied","Data":"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4"} Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.626868 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mprvr" event={"ID":"07d385c8-daa7-4456-ad07-a1f2c296e8e0","Type":"ContainerDied","Data":"676ec7dc6a5fd6e5db48b957e4e7357f89ae338819a6eb71e3e7d00d674fd4ae"} Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.626898 4902 scope.go:117] "RemoveContainer" containerID="57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.654592 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.658591 4902 scope.go:117] "RemoveContainer" containerID="57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4" Dec 02 14:31:43 crc kubenswrapper[4902]: E1202 14:31:43.659078 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4\": container with ID starting with 57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4 not found: ID does not exist" containerID="57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.659107 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4"} err="failed to get container status \"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4\": rpc error: code = NotFound desc = could not find container \"57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4\": container with ID starting with 57cfe61a6bba864b227ada44c40945c4a41aa111923bfd31cef21bd98f8d88d4 not found: ID does not exist" Dec 02 14:31:43 crc kubenswrapper[4902]: I1202 14:31:43.660923 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-mprvr"] Dec 02 14:31:44 crc kubenswrapper[4902]: I1202 14:31:44.939466 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"] Dec 02 14:31:44 crc kubenswrapper[4902]: I1202 14:31:44.940175 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vcvq6" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="registry-server" containerID="cri-o://6073938fd4436fdf8718a7258d0a5db22e0525dce90ae70b4efef05a198e920d" gracePeriod=2 Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.121405 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" path="/var/lib/kubelet/pods/07d385c8-daa7-4456-ad07-a1f2c296e8e0/volumes" Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.668652 4902 generic.go:334] "Generic (PLEG): container finished" podID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerID="6073938fd4436fdf8718a7258d0a5db22e0525dce90ae70b4efef05a198e920d" exitCode=0 Dec 02 
14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.668698 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerDied","Data":"6073938fd4436fdf8718a7258d0a5db22e0525dce90ae70b4efef05a198e920d"}
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.855750 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcvq6"
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.934792 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25frx\" (UniqueName: \"kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx\") pod \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") "
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.934966 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-catalog-content\") pod \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") "
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.935030 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities\") pod \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\" (UID: \"0fb0bbc5-03f0-4d35-9727-f26bd725e139\") "
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.936128 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities" (OuterVolumeSpecName: "utilities") pod "0fb0bbc5-03f0-4d35-9727-f26bd725e139" (UID: "0fb0bbc5-03f0-4d35-9727-f26bd725e139"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:31:45 crc kubenswrapper[4902]: I1202 14:31:45.940844 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx" (OuterVolumeSpecName: "kube-api-access-25frx") pod "0fb0bbc5-03f0-4d35-9727-f26bd725e139" (UID: "0fb0bbc5-03f0-4d35-9727-f26bd725e139"). InnerVolumeSpecName "kube-api-access-25frx". PluginName "kubernetes.io/projected", VolumeGidValue ""
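The teardown above runs in a fixed order: operationExecutor.UnmountVolume starts per volume, each plugin's TearDown must succeed, and only then does the reconciler report "Volume detached", after which the orphaned pod volumes dir can be cleaned up (as at 14:31:47 below). A sketch of that succeed-before-detach ordering, with hypothetical types rather than the kubelet volumemanager API:

package main

import "fmt"

// volume models just enough of the reconciler entries above; hypothetical
// types, not the kubelet volumemanager API.
type volume struct{ name, plugin string }

// tearDownAll mirrors the ordering in the log: each volume's TearDown must
// succeed before it may be reported "Volume detached"; a failure leaves the
// volume in the actual state of the world to be retried on the next pass.
func tearDownAll(vols []volume, tearDown func(volume) error) (detached, remaining []volume) {
	for _, v := range vols {
		if err := tearDown(v); err != nil {
			remaining = append(remaining, v) // retried by a later reconcile pass
			continue
		}
		detached = append(detached, v) // now safe to log "Volume detached"
	}
	return detached, remaining
}

func main() {
	vols := []volume{
		{"utilities", "kubernetes.io/empty-dir"},
		{"kube-api-access-25frx", "kubernetes.io/projected"},
		{"catalog-content", "kubernetes.io/empty-dir"},
	}
	ok := func(volume) error { return nil } // stand-in for the plugin's TearDown
	d, r := tearDownAll(vols, ok)
	fmt.Println(len(d), "detached,", len(r), "remaining")
}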
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.036987 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.037033 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25frx\" (UniqueName: \"kubernetes.io/projected/0fb0bbc5-03f0-4d35-9727-f26bd725e139-kube-api-access-25frx\") on node \"crc\" DevicePath \"\"" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.037048 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fb0bbc5-03f0-4d35-9727-f26bd725e139-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.678104 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vcvq6" event={"ID":"0fb0bbc5-03f0-4d35-9727-f26bd725e139","Type":"ContainerDied","Data":"0d1db71dff51303806787c0325636d3ae7840a49a85a697271fd58f4299d327d"} Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.678154 4902 scope.go:117] "RemoveContainer" containerID="6073938fd4436fdf8718a7258d0a5db22e0525dce90ae70b4efef05a198e920d" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.678173 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vcvq6" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.697621 4902 scope.go:117] "RemoveContainer" containerID="5a566bf3658903add7142268c5b3e349ac12892ef76ba3891420c96516ce1eb9" Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.708349 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"] Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.712728 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vcvq6"] Dec 02 14:31:46 crc kubenswrapper[4902]: I1202 14:31:46.726520 4902 scope.go:117] "RemoveContainer" containerID="e3d89ef94bd82208371463e7e92f5f43f0b84d248da036bb079ce069f4553018" Dec 02 14:31:47 crc kubenswrapper[4902]: I1202 14:31:47.124768 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" path="/var/lib/kubelet/pods/0fb0bbc5-03f0-4d35-9727-f26bd725e139/volumes" Dec 02 14:31:51 crc kubenswrapper[4902]: I1202 14:31:51.512987 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:51 crc kubenswrapper[4902]: I1202 14:31:51.513313 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:51 crc kubenswrapper[4902]: I1202 14:31:51.544900 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:51 crc kubenswrapper[4902]: I1202 14:31:51.738508 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-jw4ms" Dec 02 14:31:52 crc kubenswrapper[4902]: I1202 14:31:52.682938 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2rnt6" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387224 4902 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz"] Dec 02 14:31:53 crc kubenswrapper[4902]: E1202 14:31:53.387500 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="extract-utilities" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387520 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="extract-utilities" Dec 02 14:31:53 crc kubenswrapper[4902]: E1202 14:31:53.387539 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387549 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: E1202 14:31:53.387585 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387594 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: E1202 14:31:53.387617 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="extract-content" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387624 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="extract-content" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387768 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fb0bbc5-03f0-4d35-9727-f26bd725e139" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.387809 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d385c8-daa7-4456-ad07-a1f2c296e8e0" containerName="registry-server" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.388906 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.393217 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-sb7xb" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.399684 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz"] Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.543461 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.543597 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.543647 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7zxg\" (UniqueName: \"kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.645268 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7zxg\" (UniqueName: \"kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.645389 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.645500 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.646361 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.646380 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.681622 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7zxg\" (UniqueName: \"kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg\") pod \"09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.709781 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:31:53 crc kubenswrapper[4902]: I1202 14:31:53.935779 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz"] Dec 02 14:31:54 crc kubenswrapper[4902]: I1202 14:31:54.748344 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" event={"ID":"b5a8d6b1-20ad-4539-8696-c8b30997be69","Type":"ContainerStarted","Data":"71c5ad57a4cdfa5a42842c11894f58ad393287428b95ac08f73cb618b54c2feb"} Dec 02 14:31:56 crc kubenswrapper[4902]: I1202 14:31:56.764749 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerID="b8a0a104c46faa44da694f4db28f7ee764e8ddac6d24255ae3ced64300754b8c" exitCode=0 Dec 02 14:31:56 crc kubenswrapper[4902]: I1202 14:31:56.764811 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" event={"ID":"b5a8d6b1-20ad-4539-8696-c8b30997be69","Type":"ContainerDied","Data":"b8a0a104c46faa44da694f4db28f7ee764e8ddac6d24255ae3ced64300754b8c"} Dec 02 14:31:57 crc kubenswrapper[4902]: I1202 14:31:57.774005 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerID="6e9e594ad63ab0f0cff6f4ecda7a65bf8c2b7cf22af8923d9918147b1dcc814d" exitCode=0 Dec 02 14:31:57 crc kubenswrapper[4902]: I1202 14:31:57.774104 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" event={"ID":"b5a8d6b1-20ad-4539-8696-c8b30997be69","Type":"ContainerDied","Data":"6e9e594ad63ab0f0cff6f4ecda7a65bf8c2b7cf22af8923d9918147b1dcc814d"} Dec 02 14:31:58 crc kubenswrapper[4902]: I1202 14:31:58.786638 4902 generic.go:334] "Generic (PLEG): container finished" podID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerID="710b244f5ce746f7836db5ea23fb73e23efe900631e9c8e5cba8f453a6e79055" exitCode=0 Dec 02 14:31:58 crc kubenswrapper[4902]: I1202 14:31:58.786724 4902 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" event={"ID":"b5a8d6b1-20ad-4539-8696-c8b30997be69","Type":"ContainerDied","Data":"710b244f5ce746f7836db5ea23fb73e23efe900631e9c8e5cba8f453a6e79055"} Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.071947 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.234706 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7zxg\" (UniqueName: \"kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg\") pod \"b5a8d6b1-20ad-4539-8696-c8b30997be69\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.234747 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util\") pod \"b5a8d6b1-20ad-4539-8696-c8b30997be69\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.234794 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle\") pod \"b5a8d6b1-20ad-4539-8696-c8b30997be69\" (UID: \"b5a8d6b1-20ad-4539-8696-c8b30997be69\") " Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.236418 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle" (OuterVolumeSpecName: "bundle") pod "b5a8d6b1-20ad-4539-8696-c8b30997be69" (UID: "b5a8d6b1-20ad-4539-8696-c8b30997be69"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.242792 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg" (OuterVolumeSpecName: "kube-api-access-h7zxg") pod "b5a8d6b1-20ad-4539-8696-c8b30997be69" (UID: "b5a8d6b1-20ad-4539-8696-c8b30997be69"). InnerVolumeSpecName "kube-api-access-h7zxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.248042 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util" (OuterVolumeSpecName: "util") pod "b5a8d6b1-20ad-4539-8696-c8b30997be69" (UID: "b5a8d6b1-20ad-4539-8696-c8b30997be69"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.336393 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7zxg\" (UniqueName: \"kubernetes.io/projected/b5a8d6b1-20ad-4539-8696-c8b30997be69-kube-api-access-h7zxg\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.336440 4902 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-util\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.336462 4902 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b5a8d6b1-20ad-4539-8696-c8b30997be69-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.806772 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" event={"ID":"b5a8d6b1-20ad-4539-8696-c8b30997be69","Type":"ContainerDied","Data":"71c5ad57a4cdfa5a42842c11894f58ad393287428b95ac08f73cb618b54c2feb"} Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.807954 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71c5ad57a4cdfa5a42842c11894f58ad393287428b95ac08f73cb618b54c2feb" Dec 02 14:32:00 crc kubenswrapper[4902]: I1202 14:32:00.807098 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.382827 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g"] Dec 02 14:32:04 crc kubenswrapper[4902]: E1202 14:32:04.383481 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="util" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.383501 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="util" Dec 02 14:32:04 crc kubenswrapper[4902]: E1202 14:32:04.383521 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="pull" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.383533 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="pull" Dec 02 14:32:04 crc kubenswrapper[4902]: E1202 14:32:04.383550 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="extract" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.383587 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="extract" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.383805 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5a8d6b1-20ad-4539-8696-c8b30997be69" containerName="extract" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.384425 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.386211 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-9f7ft" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.406230 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g"] Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.489764 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln8gg\" (UniqueName: \"kubernetes.io/projected/4cdeb940-6336-43a7-bec3-bf831b83bce7-kube-api-access-ln8gg\") pod \"openstack-operator-controller-operator-fd5ddc88-7hv7g\" (UID: \"4cdeb940-6336-43a7-bec3-bf831b83bce7\") " pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.591307 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln8gg\" (UniqueName: \"kubernetes.io/projected/4cdeb940-6336-43a7-bec3-bf831b83bce7-kube-api-access-ln8gg\") pod \"openstack-operator-controller-operator-fd5ddc88-7hv7g\" (UID: \"4cdeb940-6336-43a7-bec3-bf831b83bce7\") " pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.611618 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln8gg\" (UniqueName: \"kubernetes.io/projected/4cdeb940-6336-43a7-bec3-bf831b83bce7-kube-api-access-ln8gg\") pod \"openstack-operator-controller-operator-fd5ddc88-7hv7g\" (UID: \"4cdeb940-6336-43a7-bec3-bf831b83bce7\") " pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:04 crc kubenswrapper[4902]: I1202 14:32:04.703953 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:05 crc kubenswrapper[4902]: I1202 14:32:05.132752 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g"] Dec 02 14:32:05 crc kubenswrapper[4902]: W1202 14:32:05.146903 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cdeb940_6336_43a7_bec3_bf831b83bce7.slice/crio-d8aa2a97634834ac9f3a8c43834eb572560cd5f5db7a362343bee7e976474362 WatchSource:0}: Error finding container d8aa2a97634834ac9f3a8c43834eb572560cd5f5db7a362343bee7e976474362: Status 404 returned error can't find the container with id d8aa2a97634834ac9f3a8c43834eb572560cd5f5db7a362343bee7e976474362 Dec 02 14:32:05 crc kubenswrapper[4902]: I1202 14:32:05.846468 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" event={"ID":"4cdeb940-6336-43a7-bec3-bf831b83bce7","Type":"ContainerStarted","Data":"d8aa2a97634834ac9f3a8c43834eb572560cd5f5db7a362343bee7e976474362"} Dec 02 14:32:05 crc kubenswrapper[4902]: I1202 14:32:05.942454 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wcqbz"] Dec 02 14:32:05 crc kubenswrapper[4902]: I1202 14:32:05.943542 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:05 crc kubenswrapper[4902]: I1202 14:32:05.960581 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wcqbz"] Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.110892 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-catalog-content\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.111296 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-utilities\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.111330 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nxzk\" (UniqueName: \"kubernetes.io/projected/22c1a675-a060-4372-ab73-1c25c3f3fa70-kube-api-access-2nxzk\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.212512 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-utilities\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.212602 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nxzk\" (UniqueName: \"kubernetes.io/projected/22c1a675-a060-4372-ab73-1c25c3f3fa70-kube-api-access-2nxzk\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.212750 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-catalog-content\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.214121 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-catalog-content\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.216639 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22c1a675-a060-4372-ab73-1c25c3f3fa70-utilities\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.233154 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2nxzk\" (UniqueName: \"kubernetes.io/projected/22c1a675-a060-4372-ab73-1c25c3f3fa70-kube-api-access-2nxzk\") pod \"community-operators-wcqbz\" (UID: \"22c1a675-a060-4372-ab73-1c25c3f3fa70\") " pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:06 crc kubenswrapper[4902]: I1202 14:32:06.269104 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.275677 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wcqbz"] Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.881516 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" event={"ID":"4cdeb940-6336-43a7-bec3-bf831b83bce7","Type":"ContainerStarted","Data":"919449ab7fc0784a03f35f00ba5b16e9be5ebae2e84fcd346e6b801b102d49c7"} Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.881924 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.883915 4902 generic.go:334] "Generic (PLEG): container finished" podID="22c1a675-a060-4372-ab73-1c25c3f3fa70" containerID="0265c94172f116e432efeeb4387560b287f11ae3bbb9f58d865012f8a827a2b9" exitCode=0 Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.883971 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcqbz" event={"ID":"22c1a675-a060-4372-ab73-1c25c3f3fa70","Type":"ContainerDied","Data":"0265c94172f116e432efeeb4387560b287f11ae3bbb9f58d865012f8a827a2b9"} Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.884027 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcqbz" event={"ID":"22c1a675-a060-4372-ab73-1c25c3f3fa70","Type":"ContainerStarted","Data":"a867e0437e3d923b7bca81d57c4a8db497af2817f43d7136e4a97fb62615e66e"} Dec 02 14:32:09 crc kubenswrapper[4902]: I1202 14:32:09.925872 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" podStartSLOduration=1.917421823 podStartE2EDuration="5.925851624s" podCreationTimestamp="2025-12-02 14:32:04 +0000 UTC" firstStartedPulling="2025-12-02 14:32:05.148735901 +0000 UTC m=+956.340044620" lastFinishedPulling="2025-12-02 14:32:09.157165712 +0000 UTC m=+960.348474421" observedRunningTime="2025-12-02 14:32:09.921777548 +0000 UTC m=+961.113086277" watchObservedRunningTime="2025-12-02 14:32:09.925851624 +0000 UTC m=+961.117160333" Dec 02 14:32:14 crc kubenswrapper[4902]: I1202 14:32:14.708681 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-fd5ddc88-7hv7g" Dec 02 14:32:14 crc kubenswrapper[4902]: I1202 14:32:14.918246 4902 generic.go:334] "Generic (PLEG): container finished" podID="22c1a675-a060-4372-ab73-1c25c3f3fa70" containerID="06a762354e683f28360cc74335f0abc42040b4a370338bf5553ec7794ed4078a" exitCode=0 Dec 02 14:32:14 crc kubenswrapper[4902]: I1202 14:32:14.918295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcqbz" 
event={"ID":"22c1a675-a060-4372-ab73-1c25c3f3fa70","Type":"ContainerDied","Data":"06a762354e683f28360cc74335f0abc42040b4a370338bf5553ec7794ed4078a"} Dec 02 14:32:15 crc kubenswrapper[4902]: I1202 14:32:15.928429 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wcqbz" event={"ID":"22c1a675-a060-4372-ab73-1c25c3f3fa70","Type":"ContainerStarted","Data":"95c51a61079099966645e770535b173c652ed92f0d788c6c858b400d3780edf1"} Dec 02 14:32:15 crc kubenswrapper[4902]: I1202 14:32:15.959888 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wcqbz" podStartSLOduration=5.3204132 podStartE2EDuration="10.959859908s" podCreationTimestamp="2025-12-02 14:32:05 +0000 UTC" firstStartedPulling="2025-12-02 14:32:09.886048242 +0000 UTC m=+961.077356951" lastFinishedPulling="2025-12-02 14:32:15.52549495 +0000 UTC m=+966.716803659" observedRunningTime="2025-12-02 14:32:15.946317573 +0000 UTC m=+967.137626282" watchObservedRunningTime="2025-12-02 14:32:15.959859908 +0000 UTC m=+967.151168657" Dec 02 14:32:16 crc kubenswrapper[4902]: I1202 14:32:16.269980 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:16 crc kubenswrapper[4902]: I1202 14:32:16.270143 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:17 crc kubenswrapper[4902]: I1202 14:32:17.314246 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wcqbz" podUID="22c1a675-a060-4372-ab73-1c25c3f3fa70" containerName="registry-server" probeResult="failure" output=< Dec 02 14:32:17 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 14:32:17 crc kubenswrapper[4902]: > Dec 02 14:32:26 crc kubenswrapper[4902]: I1202 14:32:26.319474 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:26 crc kubenswrapper[4902]: I1202 14:32:26.368393 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wcqbz" Dec 02 14:32:26 crc kubenswrapper[4902]: I1202 14:32:26.427043 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wcqbz"] Dec 02 14:32:26 crc kubenswrapper[4902]: I1202 14:32:26.573892 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:32:26 crc kubenswrapper[4902]: I1202 14:32:26.574440 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-grl6v" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="registry-server" containerID="cri-o://e0eb7bd815cbcd835733040a478ec173f3a37278d510d31b35b1bf06858814b6" gracePeriod=2 Dec 02 14:32:29 crc kubenswrapper[4902]: I1202 14:32:29.013234 4902 generic.go:334] "Generic (PLEG): container finished" podID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerID="e0eb7bd815cbcd835733040a478ec173f3a37278d510d31b35b1bf06858814b6" exitCode=0 Dec 02 14:32:29 crc kubenswrapper[4902]: I1202 14:32:29.013298 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" 
event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerDied","Data":"e0eb7bd815cbcd835733040a478ec173f3a37278d510d31b35b1bf06858814b6"} Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.012640 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.021794 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grl6v" event={"ID":"24b78819-8fca-46b1-91ab-c9b4b5253bbf","Type":"ContainerDied","Data":"4f6cc1b93db323e06433fdbfa6e0fa15a09e24ffcd4cbe7c408e54aa9f8f8889"} Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.021851 4902 scope.go:117] "RemoveContainer" containerID="e0eb7bd815cbcd835733040a478ec173f3a37278d510d31b35b1bf06858814b6" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.021879 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grl6v" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.044188 4902 scope.go:117] "RemoveContainer" containerID="d871237d88f1214acb927ef3fb3ad0a99d1a8af44a2a73bfe77a123550bb461c" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.061133 4902 scope.go:117] "RemoveContainer" containerID="8ebf0606bb35868eb3cfdb3eb3506e08364bcb8705ce746bebcd31dbc539513e" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.139912 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content\") pod \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.139957 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities\") pod \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.139989 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwp29\" (UniqueName: \"kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29\") pod \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\" (UID: \"24b78819-8fca-46b1-91ab-c9b4b5253bbf\") " Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.140886 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities" (OuterVolumeSpecName: "utilities") pod "24b78819-8fca-46b1-91ab-c9b4b5253bbf" (UID: "24b78819-8fca-46b1-91ab-c9b4b5253bbf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.148708 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29" (OuterVolumeSpecName: "kube-api-access-jwp29") pod "24b78819-8fca-46b1-91ab-c9b4b5253bbf" (UID: "24b78819-8fca-46b1-91ab-c9b4b5253bbf"). InnerVolumeSpecName "kube-api-access-jwp29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.188372 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24b78819-8fca-46b1-91ab-c9b4b5253bbf" (UID: "24b78819-8fca-46b1-91ab-c9b4b5253bbf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.245003 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwp29\" (UniqueName: \"kubernetes.io/projected/24b78819-8fca-46b1-91ab-c9b4b5253bbf-kube-api-access-jwp29\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.245325 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.245339 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24b78819-8fca-46b1-91ab-c9b4b5253bbf-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.356676 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:32:30 crc kubenswrapper[4902]: I1202 14:32:30.366159 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-grl6v"] Dec 02 14:32:31 crc kubenswrapper[4902]: I1202 14:32:31.114132 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" path="/var/lib/kubelet/pods/24b78819-8fca-46b1-91ab-c9b4b5253bbf/volumes" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.138443 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"] Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.139350 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="extract-utilities" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.139366 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="extract-utilities" Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.139397 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="extract-content" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.139404 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="extract-content" Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.139415 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="registry-server" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.139422 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="registry-server" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.139552 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="24b78819-8fca-46b1-91ab-c9b4b5253bbf" containerName="registry-server" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.144176 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.145503 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.145778 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-74fnr"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.147814 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-5f8h8"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.150450 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.151629 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.153657 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-gtc8l"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.155997 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.163763 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.187157 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.188511 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.190711 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-bzx5w"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.213121 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmvwj\" (UniqueName: \"kubernetes.io/projected/605097d2-e1a6-481d-91e2-fd3b638ad7b1-kube-api-access-lmvwj\") pod \"cinder-operator-controller-manager-859b6ccc6-j55rx\" (UID: \"605097d2-e1a6-481d-91e2-fd3b638ad7b1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.213227 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6j8d\" (UniqueName: \"kubernetes.io/projected/eb55d3d7-65ac-4f76-9cb1-210cff99a5bd-kube-api-access-m6j8d\") pod \"designate-operator-controller-manager-78b4bc895b-v8th7\" (UID: \"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.213258 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z6lc\" (UniqueName: \"kubernetes.io/projected/ebf2d80f-02cb-4f53-a5b1-67280d3cd74b-kube-api-access-5z6lc\") pod \"barbican-operator-controller-manager-7d9dfd778-jgk7w\" (UID: \"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.213355 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.214686 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.216644 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-h5zb7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.233680 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.241764 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.247587 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.248859 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.251606 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-jj6dx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.263911 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.270804 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.278888 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.279941 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.281746 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.282087 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gmrr9"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.290772 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.301005 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.315881 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmvwj\" (UniqueName: \"kubernetes.io/projected/605097d2-e1a6-481d-91e2-fd3b638ad7b1-kube-api-access-lmvwj\") pod \"cinder-operator-controller-manager-859b6ccc6-j55rx\" (UID: \"605097d2-e1a6-481d-91e2-fd3b638ad7b1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.315953 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff2f8\" (UniqueName: \"kubernetes.io/projected/9f4cfa25-c827-4b4c-ab57-8dc0221d30b3-kube-api-access-ff2f8\") pod \"glance-operator-controller-manager-77987cd8cd-5w9nk\" (UID: \"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.315998 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gssp\" (UniqueName: \"kubernetes.io/projected/4ec2dc11-e22a-40c1-926b-d987e05b8d17-kube-api-access-6gssp\") pod \"heat-operator-controller-manager-5f64f6f8bb-9nw45\" (UID: \"4ec2dc11-e22a-40c1-926b-d987e05b8d17\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.316023 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldvdk\" (UniqueName: \"kubernetes.io/projected/3c045445-0a69-416e-a9c6-868843e4c3e9-kube-api-access-ldvdk\") pod \"horizon-operator-controller-manager-68c6d99b8f-q688c\" (UID: \"3c045445-0a69-416e-a9c6-868843e4c3e9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.316091 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6j8d\" (UniqueName: \"kubernetes.io/projected/eb55d3d7-65ac-4f76-9cb1-210cff99a5bd-kube-api-access-m6j8d\") pod \"designate-operator-controller-manager-78b4bc895b-v8th7\" (UID: \"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.316125 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z6lc\" (UniqueName: \"kubernetes.io/projected/ebf2d80f-02cb-4f53-a5b1-67280d3cd74b-kube-api-access-5z6lc\") pod \"barbican-operator-controller-manager-7d9dfd778-jgk7w\" (UID: \"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.327935 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.333247 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-56qmv"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.342270 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z6lc\" (UniqueName: \"kubernetes.io/projected/ebf2d80f-02cb-4f53-a5b1-67280d3cd74b-kube-api-access-5z6lc\") pod \"barbican-operator-controller-manager-7d9dfd778-jgk7w\" (UID: \"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.346639 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.347780 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.358675 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.364935 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-cmw78"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.374127 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6j8d\" (UniqueName: \"kubernetes.io/projected/eb55d3d7-65ac-4f76-9cb1-210cff99a5bd-kube-api-access-m6j8d\") pod \"designate-operator-controller-manager-78b4bc895b-v8th7\" (UID: \"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.376346 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmvwj\" (UniqueName: \"kubernetes.io/projected/605097d2-e1a6-481d-91e2-fd3b638ad7b1-kube-api-access-lmvwj\") pod \"cinder-operator-controller-manager-859b6ccc6-j55rx\" (UID: \"605097d2-e1a6-481d-91e2-fd3b638ad7b1\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.387697 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.395587 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.396769 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.399108 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.400227 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.400759 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-4pk76"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.402614 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-jn2q8"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.411679 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417106 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2knv4\" (UniqueName: \"kubernetes.io/projected/4d401ff5-aff3-4d1c-9fae-d56e1fea07f6-kube-api-access-2knv4\") pod \"keystone-operator-controller-manager-7765d96ddf-8bnj2\" (UID: \"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417349 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff2f8\" (UniqueName: \"kubernetes.io/projected/9f4cfa25-c827-4b4c-ab57-8dc0221d30b3-kube-api-access-ff2f8\") pod \"glance-operator-controller-manager-77987cd8cd-5w9nk\" (UID: \"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417451 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gssp\" (UniqueName: \"kubernetes.io/projected/4ec2dc11-e22a-40c1-926b-d987e05b8d17-kube-api-access-6gssp\") pod \"heat-operator-controller-manager-5f64f6f8bb-9nw45\" (UID: \"4ec2dc11-e22a-40c1-926b-d987e05b8d17\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417529 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46s9c\" (UniqueName: \"kubernetes.io/projected/16c02339-b964-4c10-8beb-32402af37c34-kube-api-access-46s9c\") pod \"ironic-operator-controller-manager-6c548fd776-wdt4v\" (UID: \"16c02339-b964-4c10-8beb-32402af37c34\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417613 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldvdk\" (UniqueName: \"kubernetes.io/projected/3c045445-0a69-416e-a9c6-868843e4c3e9-kube-api-access-ldvdk\") pod \"horizon-operator-controller-manager-68c6d99b8f-q688c\" (UID: \"3c045445-0a69-416e-a9c6-868843e4c3e9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417684 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.417778 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-kube-api-access-lxnk6\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.441204 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.450767 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.451878 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.455614 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.456754 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.462666 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zt5xq"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.463190 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.463438 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-t87vx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.465532 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.469292 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.479233 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-87z5d"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.480421 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.485379 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff2f8\" (UniqueName: \"kubernetes.io/projected/9f4cfa25-c827-4b4c-ab57-8dc0221d30b3-kube-api-access-ff2f8\") pod \"glance-operator-controller-manager-77987cd8cd-5w9nk\" (UID: \"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.485692 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-fqj98"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.490055 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.491963 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldvdk\" (UniqueName: \"kubernetes.io/projected/3c045445-0a69-416e-a9c6-868843e4c3e9-kube-api-access-ldvdk\") pod \"horizon-operator-controller-manager-68c6d99b8f-q688c\" (UID: \"3c045445-0a69-416e-a9c6-868843e4c3e9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.494342 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.495487 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.497891 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-4mrxd"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.499623 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-ghmml"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.501465 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.503209 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gssp\" (UniqueName: \"kubernetes.io/projected/4ec2dc11-e22a-40c1-926b-d987e05b8d17-kube-api-access-6gssp\") pod \"heat-operator-controller-manager-5f64f6f8bb-9nw45\" (UID: \"4ec2dc11-e22a-40c1-926b-d987e05b8d17\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.508095 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-87z5d"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.509159 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.510483 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6sf2q"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.517998 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519316 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnc9r\" (UniqueName: \"kubernetes.io/projected/0d72b6fb-7ab2-4a48-bdca-17f2794daf3e-kube-api-access-wnc9r\") pod \"mariadb-operator-controller-manager-56bbcc9d85-gnl4x\" (UID: \"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519353 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnqd5\" (UniqueName: \"kubernetes.io/projected/5603e150-66d0-4016-a762-fd636f211c11-kube-api-access-xnqd5\") pod \"nova-operator-controller-manager-697bc559fc-nhklg\" (UID: \"5603e150-66d0-4016-a762-fd636f211c11\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519386 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq2rw\" (UniqueName: \"kubernetes.io/projected/57034fef-009f-4593-92cb-67cdde94f9e0-kube-api-access-jq2rw\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-qh5qm\" (UID: \"57034fef-009f-4593-92cb-67cdde94f9e0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519412 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46s9c\" (UniqueName: \"kubernetes.io/projected/16c02339-b964-4c10-8beb-32402af37c34-kube-api-access-46s9c\") pod \"ironic-operator-controller-manager-6c548fd776-wdt4v\" (UID: \"16c02339-b964-4c10-8beb-32402af37c34\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519439 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519458 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-kube-api-access-lxnk6\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519485 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kv7j\" (UniqueName: \"kubernetes.io/projected/fd510a47-813b-4ec1-953b-cfefa2fb890f-kube-api-access-2kv7j\") pod \"manila-operator-controller-manager-7c79b5df47-xvrql\" (UID: \"fd510a47-813b-4ec1-953b-cfefa2fb890f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519497 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.519611 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2knv4\" (UniqueName: \"kubernetes.io/projected/4d401ff5-aff3-4d1c-9fae-d56e1fea07f6-kube-api-access-2knv4\") pod \"keystone-operator-controller-manager-7765d96ddf-8bnj2\" (UID: \"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"
Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.519845 4902 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.519908 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert podName:9c65e1ba-dd1f-4d63-ae34-c18525c76bcf nodeName:}" failed. No retries permitted until 2025-12-02 14:32:36.019889547 +0000 UTC m=+987.211198256 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert") pod "infra-operator-controller-manager-57548d458d-bwpd5" (UID: "9c65e1ba-dd1f-4d63-ae34-c18525c76bcf") : secret "infra-operator-webhook-server-cert" not found
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.525088 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.525328 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xzrxc"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.532640 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.532986 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.538584 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-ghmml"]
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.545362 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-kube-api-access-lxnk6\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"
Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.546706 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.548440 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.550174 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.560112 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-jdjst" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.560276 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.562386 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46s9c\" (UniqueName: \"kubernetes.io/projected/16c02339-b964-4c10-8beb-32402af37c34-kube-api-access-46s9c\") pod \"ironic-operator-controller-manager-6c548fd776-wdt4v\" (UID: \"16c02339-b964-4c10-8beb-32402af37c34\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.565692 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2knv4\" (UniqueName: \"kubernetes.io/projected/4d401ff5-aff3-4d1c-9fae-d56e1fea07f6-kube-api-access-2knv4\") pod \"keystone-operator-controller-manager-7765d96ddf-8bnj2\" (UID: \"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.565764 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.583648 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.584916 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.585401 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.590829 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.596665 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-txzgt" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.645466 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mlrc\" (UniqueName: \"kubernetes.io/projected/fc37cd76-791d-468a-9470-da5138c96d34-kube-api-access-5mlrc\") pod \"swift-operator-controller-manager-5f8c65bbfc-cdphg\" (UID: \"fc37cd76-791d-468a-9470-da5138c96d34\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.651767 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq2rw\" (UniqueName: \"kubernetes.io/projected/57034fef-009f-4593-92cb-67cdde94f9e0-kube-api-access-jq2rw\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-qh5qm\" (UID: \"57034fef-009f-4593-92cb-67cdde94f9e0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.651836 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrnz8\" (UniqueName: \"kubernetes.io/projected/05625664-e0a8-4c8c-904e-69e4b3b7df9b-kube-api-access-mrnz8\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.651903 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.651989 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kv7j\" (UniqueName: \"kubernetes.io/projected/fd510a47-813b-4ec1-953b-cfefa2fb890f-kube-api-access-2kv7j\") pod \"manila-operator-controller-manager-7c79b5df47-xvrql\" (UID: \"fd510a47-813b-4ec1-953b-cfefa2fb890f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.652037 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qn5w\" (UniqueName: \"kubernetes.io/projected/3838a55d-af10-4de8-ad85-bdbd8e49ed62-kube-api-access-9qn5w\") pod \"ovn-operator-controller-manager-b6456fdb6-pnphg\" (UID: \"3838a55d-af10-4de8-ad85-bdbd8e49ed62\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.652130 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnc9r\" (UniqueName: 
\"kubernetes.io/projected/0d72b6fb-7ab2-4a48-bdca-17f2794daf3e-kube-api-access-wnc9r\") pod \"mariadb-operator-controller-manager-56bbcc9d85-gnl4x\" (UID: \"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.652167 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrcns\" (UniqueName: \"kubernetes.io/projected/437c097d-1c14-4668-a3c6-86802ed4a253-kube-api-access-qrcns\") pod \"placement-operator-controller-manager-78f8948974-ghmml\" (UID: \"437c097d-1c14-4668-a3c6-86802ed4a253\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.652188 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmjsh\" (UniqueName: \"kubernetes.io/projected/a056d211-44a9-4585-bbf0-fc0413a57099-kube-api-access-vmjsh\") pod \"octavia-operator-controller-manager-998648c74-87z5d\" (UID: \"a056d211-44a9-4585-bbf0-fc0413a57099\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.652214 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnqd5\" (UniqueName: \"kubernetes.io/projected/5603e150-66d0-4016-a762-fd636f211c11-kube-api-access-xnqd5\") pod \"nova-operator-controller-manager-697bc559fc-nhklg\" (UID: \"5603e150-66d0-4016-a762-fd636f211c11\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.686969 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq2rw\" (UniqueName: \"kubernetes.io/projected/57034fef-009f-4593-92cb-67cdde94f9e0-kube-api-access-jq2rw\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-qh5qm\" (UID: \"57034fef-009f-4593-92cb-67cdde94f9e0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.693201 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnc9r\" (UniqueName: \"kubernetes.io/projected/0d72b6fb-7ab2-4a48-bdca-17f2794daf3e-kube-api-access-wnc9r\") pod \"mariadb-operator-controller-manager-56bbcc9d85-gnl4x\" (UID: \"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.694993 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kv7j\" (UniqueName: \"kubernetes.io/projected/fd510a47-813b-4ec1-953b-cfefa2fb890f-kube-api-access-2kv7j\") pod \"manila-operator-controller-manager-7c79b5df47-xvrql\" (UID: \"fd510a47-813b-4ec1-953b-cfefa2fb890f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.706403 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnqd5\" (UniqueName: \"kubernetes.io/projected/5603e150-66d0-4016-a762-fd636f211c11-kube-api-access-xnqd5\") pod \"nova-operator-controller-manager-697bc559fc-nhklg\" (UID: \"5603e150-66d0-4016-a762-fd636f211c11\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 
14:32:35.710687 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.713422 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.729254 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-2dkzd" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.730473 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.740644 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753275 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qn5w\" (UniqueName: \"kubernetes.io/projected/3838a55d-af10-4de8-ad85-bdbd8e49ed62-kube-api-access-9qn5w\") pod \"ovn-operator-controller-manager-b6456fdb6-pnphg\" (UID: \"3838a55d-af10-4de8-ad85-bdbd8e49ed62\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrcns\" (UniqueName: \"kubernetes.io/projected/437c097d-1c14-4668-a3c6-86802ed4a253-kube-api-access-qrcns\") pod \"placement-operator-controller-manager-78f8948974-ghmml\" (UID: \"437c097d-1c14-4668-a3c6-86802ed4a253\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753372 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmjsh\" (UniqueName: \"kubernetes.io/projected/a056d211-44a9-4585-bbf0-fc0413a57099-kube-api-access-vmjsh\") pod \"octavia-operator-controller-manager-998648c74-87z5d\" (UID: \"a056d211-44a9-4585-bbf0-fc0413a57099\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753405 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpqvf\" (UniqueName: \"kubernetes.io/projected/a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c-kube-api-access-qpqvf\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tjm2s\" (UID: \"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753432 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mlrc\" (UniqueName: \"kubernetes.io/projected/fc37cd76-791d-468a-9470-da5138c96d34-kube-api-access-5mlrc\") pod \"swift-operator-controller-manager-5f8c65bbfc-cdphg\" (UID: \"fc37cd76-791d-468a-9470-da5138c96d34\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753452 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrnz8\" (UniqueName: 
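[editor's worked example] Every kube-api-access-* volume mounted in this burst is the projected service-account token volume the API server attaches to each pod: a bound token plus kube-root-ca.crt and the pod's namespace via the downward API. A sketch of such a volume expressed with the k8s.io/api/core/v1 types, assuming that module is available (the name suffix is generated per pod, and the 3607s expiry is the usual default rather than something read from this log):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607) // typical default token lifetime; an assumption here
	vol := corev1.Volume{
		Name: "kube-api-access-mrnz8",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					// Bound service-account token, written to "token".
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					}},
					// Cluster CA bundle, written to "ca.crt".
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
					}},
					// Pod namespace via the downward API, written to "namespace".
					{DownwardAPI: &corev1.DownwardAPIProjection{
						Items: []corev1.DownwardAPIVolumeFile{{
							Path:     "namespace",
							FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"},
						}},
					}},
				},
			},
		},
	}
	fmt.Printf("%+v\n", vol)
}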
\"kubernetes.io/projected/05625664-e0a8-4c8c-904e-69e4b3b7df9b-kube-api-access-mrnz8\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.753480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.753644 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:35 crc kubenswrapper[4902]: E1202 14:32:35.753691 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:32:36.253676483 +0000 UTC m=+987.444985192 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.758644 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.763022 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.782868 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrnz8\" (UniqueName: \"kubernetes.io/projected/05625664-e0a8-4c8c-904e-69e4b3b7df9b-kube-api-access-mrnz8\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.784142 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrcns\" (UniqueName: \"kubernetes.io/projected/437c097d-1c14-4668-a3c6-86802ed4a253-kube-api-access-qrcns\") pod \"placement-operator-controller-manager-78f8948974-ghmml\" (UID: \"437c097d-1c14-4668-a3c6-86802ed4a253\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.784648 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qn5w\" (UniqueName: \"kubernetes.io/projected/3838a55d-af10-4de8-ad85-bdbd8e49ed62-kube-api-access-9qn5w\") pod \"ovn-operator-controller-manager-b6456fdb6-pnphg\" (UID: \"3838a55d-af10-4de8-ad85-bdbd8e49ed62\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.785928 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mlrc\" (UniqueName: \"kubernetes.io/projected/fc37cd76-791d-468a-9470-da5138c96d34-kube-api-access-5mlrc\") pod \"swift-operator-controller-manager-5f8c65bbfc-cdphg\" (UID: \"fc37cd76-791d-468a-9470-da5138c96d34\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.811094 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmjsh\" (UniqueName: \"kubernetes.io/projected/a056d211-44a9-4585-bbf0-fc0413a57099-kube-api-access-vmjsh\") pod \"octavia-operator-controller-manager-998648c74-87z5d\" (UID: \"a056d211-44a9-4585-bbf0-fc0413a57099\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.824415 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.825649 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.830451 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-t65m4" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.842319 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.852056 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.856193 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcfls\" (UniqueName: \"kubernetes.io/projected/dd8c8a81-b5af-4a0c-8640-f4455c09abc1-kube-api-access-qcfls\") pod \"test-operator-controller-manager-5854674fcc-mhdgs\" (UID: \"dd8c8a81-b5af-4a0c-8640-f4455c09abc1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.856284 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpqvf\" (UniqueName: \"kubernetes.io/projected/a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c-kube-api-access-qpqvf\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tjm2s\" (UID: \"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.879849 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.880780 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpqvf\" (UniqueName: \"kubernetes.io/projected/a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c-kube-api-access-qpqvf\") pod \"telemetry-operator-controller-manager-76cc84c6bb-tjm2s\" (UID: \"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.920471 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.922956 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.924105 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.928765 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.928959 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-2njgc" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.929218 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.935454 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5"] Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.946552 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.958647 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzcdr\" (UniqueName: \"kubernetes.io/projected/0cbcce3d-353f-44fc-896e-75c3a8b58c21-kube-api-access-tzcdr\") pod \"watcher-operator-controller-manager-64fcb99cdb-nkttd\" (UID: \"0cbcce3d-353f-44fc-896e-75c3a8b58c21\") " pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.958697 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcfls\" (UniqueName: \"kubernetes.io/projected/dd8c8a81-b5af-4a0c-8640-f4455c09abc1-kube-api-access-qcfls\") pod \"test-operator-controller-manager-5854674fcc-mhdgs\" (UID: \"dd8c8a81-b5af-4a0c-8640-f4455c09abc1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.972075 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:32:35 crc kubenswrapper[4902]: I1202 14:32:35.983144 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcfls\" (UniqueName: \"kubernetes.io/projected/dd8c8a81-b5af-4a0c-8640-f4455c09abc1-kube-api-access-qcfls\") pod \"test-operator-controller-manager-5854674fcc-mhdgs\" (UID: \"dd8c8a81-b5af-4a0c-8640-f4455c09abc1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.000445 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.030188 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.031333 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.034618 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-v8cvn" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.036391 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.062235 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.063078 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8q9c\" (UniqueName: \"kubernetes.io/projected/d319072a-bef8-4511-a876-dc7c6e59817e-kube-api-access-v8q9c\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.063107 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.063160 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.063180 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzcdr\" (UniqueName: \"kubernetes.io/projected/0cbcce3d-353f-44fc-896e-75c3a8b58c21-kube-api-access-tzcdr\") pod \"watcher-operator-controller-manager-64fcb99cdb-nkttd\" (UID: \"0cbcce3d-353f-44fc-896e-75c3a8b58c21\") " pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.063224 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.063336 4902 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.063374 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert podName:9c65e1ba-dd1f-4d63-ae34-c18525c76bcf nodeName:}" failed. No retries permitted until 2025-12-02 14:32:37.063359357 +0000 UTC m=+988.254668056 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert") pod "infra-operator-controller-manager-57548d458d-bwpd5" (UID: "9c65e1ba-dd1f-4d63-ae34-c18525c76bcf") : secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.080641 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.089272 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.094433 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzcdr\" (UniqueName: \"kubernetes.io/projected/0cbcce3d-353f-44fc-896e-75c3a8b58c21-kube-api-access-tzcdr\") pod \"watcher-operator-controller-manager-64fcb99cdb-nkttd\" (UID: \"0cbcce3d-353f-44fc-896e-75c3a8b58c21\") " pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.097776 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.164802 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.164854 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8q9c\" (UniqueName: \"kubernetes.io/projected/d319072a-bef8-4511-a876-dc7c6e59817e-kube-api-access-v8q9c\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.164878 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.164905 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdkw9\" (UniqueName: \"kubernetes.io/projected/a4650041-03e0-4119-b353-281b9200355c-kube-api-access-xdkw9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gnd6s\" (UID: \"a4650041-03e0-4119-b353-281b9200355c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.166006 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.166055 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:36.666040046 +0000 UTC m=+987.857348755 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.166177 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.166196 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:36.66619032 +0000 UTC m=+987.857499029 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.184144 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.191858 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8q9c\" (UniqueName: \"kubernetes.io/projected/d319072a-bef8-4511-a876-dc7c6e59817e-kube-api-access-v8q9c\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.201649 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.213033 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.265817 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdkw9\" (UniqueName: \"kubernetes.io/projected/a4650041-03e0-4119-b353-281b9200355c-kube-api-access-xdkw9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gnd6s\" (UID: \"a4650041-03e0-4119-b353-281b9200355c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.266187 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.266457 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.266512 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert 
podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:32:37.266494102 +0000 UTC m=+988.457802811 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.299134 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdkw9\" (UniqueName: \"kubernetes.io/projected/a4650041-03e0-4119-b353-281b9200355c-kube-api-access-xdkw9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gnd6s\" (UID: \"a4650041-03e0-4119-b353-281b9200355c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.368395 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.427072 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.677366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.677725 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.677830 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.677843 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.677888 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:37.677871987 +0000 UTC m=+988.869180696 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: E1202 14:32:36.677902 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. 
No retries permitted until 2025-12-02 14:32:37.677896407 +0000 UTC m=+988.869205116 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.808650 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.837109 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.856047 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.886123 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v"] Dec 02 14:32:36 crc kubenswrapper[4902]: W1202 14:32:36.901334 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16c02339_b964_4c10_8beb_32402af37c34.slice/crio-80b08ba98a274e7de10456882d62c778bab2c0e617d68505d79ef4be6aad0023 WatchSource:0}: Error finding container 80b08ba98a274e7de10456882d62c778bab2c0e617d68505d79ef4be6aad0023: Status 404 returned error can't find the container with id 80b08ba98a274e7de10456882d62c778bab2c0e617d68505d79ef4be6aad0023 Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.924153 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk"] Dec 02 14:32:36 crc kubenswrapper[4902]: I1202 14:32:36.986385 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.050871 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.085034 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.085269 4902 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.085357 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert podName:9c65e1ba-dd1f-4d63-ae34-c18525c76bcf nodeName:}" failed. No retries permitted until 2025-12-02 14:32:39.08534022 +0000 UTC m=+990.276648929 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert") pod "infra-operator-controller-manager-57548d458d-bwpd5" (UID: "9c65e1ba-dd1f-4d63-ae34-c18525c76bcf") : secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.099669 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" event={"ID":"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e","Type":"ContainerStarted","Data":"89b03c18b0c066a67daaac43f62e049c1078d83f2a56ea4a102f19b830ca89e4"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.114721 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" event={"ID":"4ec2dc11-e22a-40c1-926b-d987e05b8d17","Type":"ContainerStarted","Data":"eb9ba6c14dccf4ebfb5dfdd0e21ac7ba74a2a26e110e99a70b4e590b44e41c1d"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115030 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" event={"ID":"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6","Type":"ContainerStarted","Data":"abed970dfa2983a924add2cd220d871bcb61ebf4cf73d0dd21d0f376c835d002"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115043 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" event={"ID":"605097d2-e1a6-481d-91e2-fd3b638ad7b1","Type":"ContainerStarted","Data":"d20b9d2a4e51028dfb5966cacca6081962f368f2006348943f4f01c9621fe948"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115053 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" event={"ID":"16c02339-b964-4c10-8beb-32402af37c34","Type":"ContainerStarted","Data":"80b08ba98a274e7de10456882d62c778bab2c0e617d68505d79ef4be6aad0023"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115062 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" event={"ID":"fd510a47-813b-4ec1-953b-cfefa2fb890f","Type":"ContainerStarted","Data":"b6e98314f878f7510c33087c728142e64f228fb1a5a6840adddf2892230c8261"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115073 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" event={"ID":"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b","Type":"ContainerStarted","Data":"4f9a830d3e9a5d392a3b1c67673c8c737df30a2c7994858bf71346294ec1532d"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.115485 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" event={"ID":"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3","Type":"ContainerStarted","Data":"f6d543e8e1ad945ea43c866add901205c11acdc3a63186aca53e4bf9ce735a38"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.116359 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" event={"ID":"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd","Type":"ContainerStarted","Data":"b40b681be1306731733756605e3781c33e12b8d44b648bb07ca81a6318e6b730"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.117262 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" event={"ID":"3c045445-0a69-416e-a9c6-868843e4c3e9","Type":"ContainerStarted","Data":"d3370965c89c3859c106227b00f17c9e93c40849e91761cc84bd08d19cb16515"} Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.251415 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.282491 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.288941 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.289146 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.289224 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:32:39.289205186 +0000 UTC m=+990.480513895 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.294268 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.308046 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg"] Dec 02 14:32:37 crc kubenswrapper[4902]: W1202 14:32:37.315951 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd8c8a81_b5af_4a0c_8640_f4455c09abc1.slice/crio-71eb5235a78da1be71e541ff5b30cde7cae452a69c13122ea4614fc9681ea8b4 WatchSource:0}: Error finding container 71eb5235a78da1be71e541ff5b30cde7cae452a69c13122ea4614fc9681ea8b4: Status 404 returned error can't find the container with id 71eb5235a78da1be71e541ff5b30cde7cae452a69c13122ea4614fc9681ea8b4 Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.323399 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.329616 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.332206 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-ghmml"] Dec 02 14:32:37 crc kubenswrapper[4902]: 
E1202 14:32:37.334094 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qpqvf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-tjm2s_openstack-operators(a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.334498 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qrcns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-ghmml_openstack-operators(437c097d-1c14-4668-a3c6-86802ed4a253): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.335530 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9qn5w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-pnphg_openstack-operators(3838a55d-af10-4de8-ad85-bdbd8e49ed62): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.336399 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qpqvf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-tjm2s_openstack-operators(a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.336425 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qrcns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-ghmml_openstack-operators(437c097d-1c14-4668-a3c6-86802ed4a253): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.337239 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9qn5w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-pnphg_openstack-operators(3838a55d-af10-4de8-ad85-bdbd8e49ed62): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.337402 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s"] Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.337484 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" podUID="a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.337518 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podUID="437c097d-1c14-4668-a3c6-86802ed4a253" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.338472 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" podUID="3838a55d-af10-4de8-ad85-bdbd8e49ed62" Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.426304 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-87z5d"] Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.437446 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s"] Dec 02 14:32:37 crc kubenswrapper[4902]: W1202 14:32:37.443188 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda056d211_44a9_4585_bbf0_fc0413a57099.slice/crio-3b18bfa226fb0a8d054041fd8eec3e61a44371fe584b2f505cca970c6851f5b8 WatchSource:0}: Error finding container 3b18bfa226fb0a8d054041fd8eec3e61a44371fe584b2f505cca970c6851f5b8: Status 404 returned error can't find the container with id 3b18bfa226fb0a8d054041fd8eec3e61a44371fe584b2f505cca970c6851f5b8 Dec 02 14:32:37 crc kubenswrapper[4902]: W1202 14:32:37.448048 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4650041_03e0_4119_b353_281b9200355c.slice/crio-9675f03f0208c76f8e00ea94d862a2da745b990aa1f5cd4e54f6924cc76be2a5 WatchSource:0}: Error finding container 9675f03f0208c76f8e00ea94d862a2da745b990aa1f5cd4e54f6924cc76be2a5: Status 404 returned error can't find the container with id 9675f03f0208c76f8e00ea94d862a2da745b990aa1f5cd4e54f6924cc76be2a5 Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.450911 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xdkw9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-gnd6s_openstack-operators(a4650041-03e0-4119-b353-281b9200355c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.452389 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podUID="a4650041-03e0-4119-b353-281b9200355c" Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.697941 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:37 crc kubenswrapper[4902]: I1202 14:32:37.698099 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.698175 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.698244 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.698269 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:39.698247354 +0000 UTC m=+990.889556083 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:37 crc kubenswrapper[4902]: E1202 14:32:37.698304 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:39.698285325 +0000 UTC m=+990.889594134 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.123621 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" event={"ID":"0cbcce3d-353f-44fc-896e-75c3a8b58c21","Type":"ContainerStarted","Data":"f166b5f6390e6bfa25abb7a04f6e9329aa1c9024cb22e1c524fd989add294af5"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.125880 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" event={"ID":"a056d211-44a9-4585-bbf0-fc0413a57099","Type":"ContainerStarted","Data":"3b18bfa226fb0a8d054041fd8eec3e61a44371fe584b2f505cca970c6851f5b8"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.127485 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" event={"ID":"437c097d-1c14-4668-a3c6-86802ed4a253","Type":"ContainerStarted","Data":"60c97eb0bc6e4d23a393cfcf86c2921f8b96ed2e1459e10eda8b61cb4e36c7c0"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.128645 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" event={"ID":"a4650041-03e0-4119-b353-281b9200355c","Type":"ContainerStarted","Data":"9675f03f0208c76f8e00ea94d862a2da745b990aa1f5cd4e54f6924cc76be2a5"} Dec 02 14:32:38 crc kubenswrapper[4902]: E1202 14:32:38.133478 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podUID="a4650041-03e0-4119-b353-281b9200355c" Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.133813 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" event={"ID":"3838a55d-af10-4de8-ad85-bdbd8e49ed62","Type":"ContainerStarted","Data":"b26cb1668d87130f13dd5564f26b17a3d13bd05cc8e9f528a9aee76097a93f66"} Dec 02 14:32:38 crc kubenswrapper[4902]: E1202 14:32:38.140744 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to 
\"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podUID="437c097d-1c14-4668-a3c6-86802ed4a253" Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.141175 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" event={"ID":"57034fef-009f-4593-92cb-67cdde94f9e0","Type":"ContainerStarted","Data":"471211b072650b1bbd66a258f449a9d581470d3a42cd2d0852c09c9d89e2c309"} Dec 02 14:32:38 crc kubenswrapper[4902]: E1202 14:32:38.142027 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" podUID="3838a55d-af10-4de8-ad85-bdbd8e49ed62" Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.142653 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" event={"ID":"5603e150-66d0-4016-a762-fd636f211c11","Type":"ContainerStarted","Data":"0d48081a52ac8ad415332113435ddd51117bc63fdefebfbb2ca0f809c3cfb99a"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.143496 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" event={"ID":"dd8c8a81-b5af-4a0c-8640-f4455c09abc1","Type":"ContainerStarted","Data":"71eb5235a78da1be71e541ff5b30cde7cae452a69c13122ea4614fc9681ea8b4"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.145129 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" event={"ID":"fc37cd76-791d-468a-9470-da5138c96d34","Type":"ContainerStarted","Data":"8dd967c8f9aba3adacc44a36331268c60d6de17ac23f311e0daf2660470392dc"} Dec 02 14:32:38 crc kubenswrapper[4902]: I1202 14:32:38.148898 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" event={"ID":"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c","Type":"ContainerStarted","Data":"41b8982cabd774e31f8ed5a8ff8cb5d25e826961c8f89d0e24d21cc56a127b6b"} Dec 02 14:32:38 crc kubenswrapper[4902]: E1202 14:32:38.154172 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" podUID="a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c" Dec 02 14:32:39 crc kubenswrapper[4902]: I1202 14:32:39.163762 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod 
\"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.165961 4902 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.166351 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert podName:9c65e1ba-dd1f-4d63-ae34-c18525c76bcf nodeName:}" failed. No retries permitted until 2025-12-02 14:32:43.166145893 +0000 UTC m=+994.357454602 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert") pod "infra-operator-controller-manager-57548d458d-bwpd5" (UID: "9c65e1ba-dd1f-4d63-ae34-c18525c76bcf") : secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.207838 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podUID="a4650041-03e0-4119-b353-281b9200355c" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.211803 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podUID="437c097d-1c14-4668-a3c6-86802ed4a253" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.211897 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" podUID="a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.211955 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" podUID="3838a55d-af10-4de8-ad85-bdbd8e49ed62" Dec 02 14:32:39 crc kubenswrapper[4902]: I1202 14:32:39.371451 4902 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.371670 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.371714 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:32:43.371699506 +0000 UTC m=+994.563008205 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: I1202 14:32:39.777376 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:39 crc kubenswrapper[4902]: I1202 14:32:39.777442 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.777565 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.777653 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:43.777636636 +0000 UTC m=+994.968945335 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.777967 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:39 crc kubenswrapper[4902]: E1202 14:32:39.777993 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. 
No retries permitted until 2025-12-02 14:32:43.777985486 +0000 UTC m=+994.969294195 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: I1202 14:32:43.239761 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.239961 4902 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.243032 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert podName:9c65e1ba-dd1f-4d63-ae34-c18525c76bcf nodeName:}" failed. No retries permitted until 2025-12-02 14:32:51.243000029 +0000 UTC m=+1002.434308748 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert") pod "infra-operator-controller-manager-57548d458d-bwpd5" (UID: "9c65e1ba-dd1f-4d63-ae34-c18525c76bcf") : secret "infra-operator-webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: I1202 14:32:43.446635 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.446835 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.446908 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:32:51.446890105 +0000 UTC m=+1002.638198824 (durationBeforeRetry 8s). 
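[Editor's note: the retry delays in the mount failures above double on each attempt (durationBeforeRetry 2s, then 4s, then 8s): the volume manager's nestedpendingoperations applies per-operation exponential backoff, so a still-missing secret is re-checked with progressively longer waits. A minimal Go sketch of that doubling pattern follows; the 2s starting point is simply the first delay observed in this log, and the cap is an illustrative assumption, not kubelet's actual constant.]

    package main

    import (
    	"fmt"
    	"time"
    )

    // expBackoff mirrors the pattern visible above: each failed attempt
    // doubles the wait before the next retry is permitted, up to a cap.
    type expBackoff struct{ last time.Duration }

    func (b *expBackoff) next() time.Duration {
    	const (
    		initial = 2 * time.Second // first delay seen in this log
    		maxWait = 2 * time.Minute // illustrative cap, not kubelet's constant
    	)
    	switch {
    	case b.last == 0:
    		b.last = initial
    	case b.last*2 > maxWait:
    		b.last = maxWait
    	default:
    		b.last *= 2
    	}
    	return b.last
    }

    func main() {
    	var b expBackoff
    	for i := 1; i <= 5; i++ {
    		fmt.Printf("attempt %d: durationBeforeRetry %v\n", i, b.next())
    	}
    	// Prints: 2s, 4s, 8s, 16s, 32s
    }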
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: I1202 14:32:43.853396 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:43 crc kubenswrapper[4902]: I1202 14:32:43.853500 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.853714 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.853774 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:51.853757692 +0000 UTC m=+1003.045066401 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.854948 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:43 crc kubenswrapper[4902]: E1202 14:32:43.855077 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:32:51.855048869 +0000 UTC m=+1003.046357588 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.261498 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.281312 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9c65e1ba-dd1f-4d63-ae34-c18525c76bcf-cert\") pod \"infra-operator-controller-manager-57548d458d-bwpd5\" (UID: \"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.466843 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.467165 4902 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.467234 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert podName:05625664-e0a8-4c8c-904e-69e4b3b7df9b nodeName:}" failed. No retries permitted until 2025-12-02 14:33:07.467213715 +0000 UTC m=+1018.658522454 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" (UID: "05625664-e0a8-4c8c-904e-69e4b3b7df9b") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.525540 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.872198 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:51 crc kubenswrapper[4902]: I1202 14:32:51.872324 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.872614 4902 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.872665 4902 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.872692 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:33:07.872666972 +0000 UTC m=+1019.063975721 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "webhook-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.872717 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs podName:d319072a-bef8-4511-a876-dc7c6e59817e nodeName:}" failed. No retries permitted until 2025-12-02 14:33:07.872703193 +0000 UTC m=+1019.064011912 (durationBeforeRetry 16s). 
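[Editor's note: every mount failure above has the same root cause: the Secret objects the volumes reference (webhook-server-cert, metrics-server-cert, infra-operator-webhook-server-cert, ...) do not exist yet. They are typically created a little later by cert-manager or the operator's own bootstrap (which one applies here is not visible in this log), after which the kubelet's next retry succeeds, as it does for the infra-operator cert at 14:32:51 above. A hedged client-go sketch of the check a cluster operator might run while waiting; it assumes a reachable kubeconfig, and the namespace and secret name are taken from the log.]

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	// Poll for the Secret the kubelet is waiting on; MountVolume.SetUp
    	// keeps failing with backoff until this object exists.
    	for {
    		_, err := cs.CoreV1().Secrets("openstack-operators").
    			Get(context.Background(), "webhook-server-cert", metav1.GetOptions{})
    		if err == nil {
    			fmt.Println("secret present; pending mounts can now succeed")
    			return
    		}
    		fmt.Println("still missing:", err)
    		time.Sleep(2 * time.Second)
    	}
    }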
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs") pod "openstack-operator-controller-manager-575d4674bc-b4fl5" (UID: "d319072a-bef8-4511-a876-dc7c6e59817e") : secret "metrics-server-cert" not found Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.888123 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 02 14:32:51 crc kubenswrapper[4902]: E1202 14:32:51.888289 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2knv4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-8bnj2_openstack-operators(4d401ff5-aff3-4d1c-9fae-d56e1fea07f6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:32:52 crc kubenswrapper[4902]: E1202 14:32:52.591890 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 02 14:32:52 crc 
kubenswrapper[4902]: E1202 14:32:52.592341 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vmjsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-87z5d_openstack-operators(a056d211-44a9-4585-bbf0-fc0413a57099): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:32:52 crc kubenswrapper[4902]: E1202 14:32:52.665750 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.155:5001/openstack-k8s-operators/watcher-operator:04f2b756afa9e575b2ebfa769c85094b932517cd" Dec 02 14:32:52 crc kubenswrapper[4902]: E1202 14:32:52.665797 4902 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.155:5001/openstack-k8s-operators/watcher-operator:04f2b756afa9e575b2ebfa769c85094b932517cd" Dec 02 14:32:52 crc kubenswrapper[4902]: E1202 14:32:52.665938 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.155:5001/openstack-k8s-operators/watcher-operator:04f2b756afa9e575b2ebfa769c85094b932517cd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tzcdr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-64fcb99cdb-nkttd_openstack-operators(0cbcce3d-353f-44fc-896e-75c3a8b58c21): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:32:54 crc kubenswrapper[4902]: E1202 14:32:54.125986 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 02 14:32:54 crc kubenswrapper[4902]: E1202 14:32:54.126148 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xnqd5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-nhklg_openstack-operators(5603e150-66d0-4016-a762-fd636f211c11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.167732 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.168628 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qrcns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-ghmml_openstack-operators(437c097d-1c14-4668-a3c6-86802ed4a253): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.379719 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" event={"ID":"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3","Type":"ContainerStarted","Data":"7268f97d9c9cf8f81fc6fef3209a5d7d1a207c31b1161651c473b01f40caf10f"} Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.381361 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" event={"ID":"3c045445-0a69-416e-a9c6-868843e4c3e9","Type":"ContainerStarted","Data":"afbf65b2391304dda68871c7009595e7e05c54c5474534928994ac606216a4bf"} Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.382728 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" event={"ID":"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e","Type":"ContainerStarted","Data":"6f7ed9738df8a17daf1b7217cd741b4578c5cb76c9be66fe601be56baaa53497"} Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.435751 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5"] Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.590291 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
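[Editor's note: the ErrImagePull entries above all end in "rpc error: code = Canceled desc = copying config: context canceled": the CRI-side image copy was aborted because the kubelet's calling context was canceled mid-pull (for example when a pull is abandoned or superseded), not because the registry rejected the image. A self-contained Go sketch of how a canceled context surfaces through such a call; the 5s copy time and 100ms cancel point are arbitrary stand-ins.]

    package main

    import (
    	"context"
    	"errors"
    	"fmt"
    	"time"
    )

    // pullImage stands in for the CRI PullImage RPC: it honors ctx and
    // returns a wrapped ctx.Err() if the caller gives up before the
    // copy finishes, which is exactly what the log entries above show.
    func pullImage(ctx context.Context, image string) error {
    	select {
    	case <-time.After(5 * time.Second): // pretend the layer copy takes 5s
    		return nil
    	case <-ctx.Done():
    		return fmt.Errorf("copying config: %w", ctx.Err())
    	}
    }

    func main() {
    	ctx, cancel := context.WithCancel(context.Background())
    	go func() {
    		time.Sleep(100 * time.Millisecond)
    		cancel() // the kubelet abandons the in-flight pull
    	}()
    	err := pullImage(ctx, "quay.io/openstack-k8s-operators/keystone-operator")
    	fmt.Println(err, "canceled?", errors.Is(err, context.Canceled))
    }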
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.590291 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jq2rw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-qh5qm_openstack-operators(57034fef-009f-4593-92cb-67cdde94f9e0): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.591460 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" podUID="57034fef-009f-4593-92cb-67cdde94f9e0"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.602481 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5mlrc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-cdphg_openstack-operators(fc37cd76-791d-468a-9470-da5138c96d34): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.602799 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m6j8d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-v8th7_openstack-operators(eb55d3d7-65ac-4f76-9cb1-210cff99a5bd): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.602897 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2kv7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-xvrql_openstack-operators(fd510a47-813b-4ec1-953b-cfefa2fb890f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.604149 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" podUID="fd510a47-813b-4ec1-953b-cfefa2fb890f"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.604260 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" podUID="fc37cd76-791d-468a-9470-da5138c96d34"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.604319 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" podUID="eb55d3d7-65ac-4f76-9cb1-210cff99a5bd"
Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.632261 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:9f4bff248214d12c7254dc3c25ef82bd14ff143e2a06d159f2a8cc1c9e6ef1fd: Get \"https://quay.io/v2/openstack-k8s-operators/rabbitmq-cluster-operator/blobs/sha256:9f4bff248214d12c7254dc3c25ef82bd14ff143e2a06d159f2a8cc1c9e6ef1fd\": context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
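[Editor's note: "pull QPS exceeded" above is the kubelet's own client-side throttle on image pulls, governed by the registryPullQPS and registryBurst settings in KubeletConfiguration (defaults 5 and 10). Once the burst is spent, further pulls fail immediately and are retried through the normal image backoff. A token-bucket sketch of that behavior using golang.org/x/time/rate; the error text is modeled on the log, not taken from kubelet source.]

    package main

    import (
    	"errors"
    	"fmt"

    	"golang.org/x/time/rate"
    )

    var errQPS = errors.New("pull QPS exceeded")

    // tryPull models the kubelet's throttle: a pull with no available
    // token fails immediately instead of queueing, so a burst of pod
    // startups produces the run of errors seen above.
    func tryPull(l *rate.Limiter, image string) error {
    	if !l.Allow() {
    		return errQPS
    	}
    	return nil // here the real kubelet would invoke the CRI PullImage
    }

    func main() {
    	limiter := rate.NewLimiter(5, 10) // registryPullQPS=5, registryBurst=10
    	for i := 0; i < 14; i++ {
    		fmt.Printf("pull %2d: %v\n", i, tryPull(limiter, fmt.Sprintf("image-%d", i)))
    	}
    }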
context canceled" logger="UnhandledError" Dec 02 14:33:04 crc kubenswrapper[4902]: E1202 14:33:04.633739 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:9f4bff248214d12c7254dc3c25ef82bd14ff143e2a06d159f2a8cc1c9e6ef1fd: Get \\\"https://quay.io/v2/openstack-k8s-operators/rabbitmq-cluster-operator/blobs/sha256:9f4bff248214d12c7254dc3c25ef82bd14ff143e2a06d159f2a8cc1c9e6ef1fd\\\": context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podUID="a4650041-03e0-4119-b353-281b9200355c" Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.731469 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:33:04 crc kubenswrapper[4902]: I1202 14:33:04.731892 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.388703 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" event={"ID":"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf","Type":"ContainerStarted","Data":"4a32125862d39bb283eee23fe291f7d85b6d8c3f4d243bd541abdbe35457d78d"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.389972 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" event={"ID":"16c02339-b964-4c10-8beb-32402af37c34","Type":"ContainerStarted","Data":"88469a4b623a40dbed87196fd45e016a116ed67f3399a5a5c1179aa473c9602c"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.391145 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" event={"ID":"fd510a47-813b-4ec1-953b-cfefa2fb890f","Type":"ContainerStarted","Data":"4bd3b8df6e434b891eccb701fa179b15dd7e0c90e6b0dfb631b24acfe96a645a"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.391304 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" Dec 02 14:33:05 crc kubenswrapper[4902]: E1202 14:33:05.392313 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" podUID="fd510a47-813b-4ec1-953b-cfefa2fb890f" Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.392476 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" event={"ID":"3838a55d-af10-4de8-ad85-bdbd8e49ed62","Type":"ContainerStarted","Data":"8db81153ca1b885a4336171c41c8ccb33c6b0b83992eff6d4e390d4b43901d35"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.393913 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" event={"ID":"57034fef-009f-4593-92cb-67cdde94f9e0","Type":"ContainerStarted","Data":"d5c7bbc4b49e656a2815e3316eee096eabf88e8dd08d1229a94d77871093f010"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.394040 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" Dec 02 14:33:05 crc kubenswrapper[4902]: E1202 14:33:05.395087 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" podUID="57034fef-009f-4593-92cb-67cdde94f9e0" Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.395220 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" event={"ID":"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd","Type":"ContainerStarted","Data":"0797f34c1d88b4348dbcef6db46f43f0148f46607c180c2ee9e087fa9d7ed525"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.395482 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" Dec 02 14:33:05 crc kubenswrapper[4902]: E1202 14:33:05.396243 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" podUID="eb55d3d7-65ac-4f76-9cb1-210cff99a5bd" Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.396279 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" event={"ID":"fc37cd76-791d-468a-9470-da5138c96d34","Type":"ContainerStarted","Data":"31c82e6300aa4bd15825a457011219e15817a89fa53b01d2221446dc428b3037"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.396416 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:33:05 crc kubenswrapper[4902]: E1202 14:33:05.398839 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" podUID="fc37cd76-791d-468a-9470-da5138c96d34" Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.399402 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" event={"ID":"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b","Type":"ContainerStarted","Data":"0cdc0fe4db658634514219eb84328ffd4e2b8edf8d8dc79aed12ebdf716f01d3"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.400897 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" event={"ID":"4ec2dc11-e22a-40c1-926b-d987e05b8d17","Type":"ContainerStarted","Data":"335a2824826d8bc6574a415bfc689261c2bda1ab388165ed0746fcfea9765ebf"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 
14:33:05.402161 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" event={"ID":"dd8c8a81-b5af-4a0c-8640-f4455c09abc1","Type":"ContainerStarted","Data":"1ea0f070fd04fe43a4f0e7c804a73436ed764be0d8432cd18ad6b381a89ba8ea"} Dec 02 14:33:05 crc kubenswrapper[4902]: I1202 14:33:05.403359 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" event={"ID":"605097d2-e1a6-481d-91e2-fd3b638ad7b1","Type":"ContainerStarted","Data":"4b986fd6702b0f0ea70fbdf7be45b6de355af266b18209bd76ec70b1475cc8f8"} Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.370245 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.370440 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tzcdr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-64fcb99cdb-nkttd_openstack-operators(0cbcce3d-353f-44fc-896e-75c3a8b58c21): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.371643 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" podUID="0cbcce3d-353f-44fc-896e-75c3a8b58c21" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.375502 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.375673 4902 kuberuntime_manager.go:1274] 
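[Editor's note: the machine-config-daemon liveness failure above ("connect: connection refused" on 127.0.0.1:8798) means nothing was listening on the probe port at that instant; the kubelet's HTTP prober is essentially a GET with a short timeout that treats connection errors and non-2xx/3xx statuses as failures. A minimal sketch of an equivalent check; the URL is the one from the log, and the 1s timeout is an assumption.]

    package main

    import (
    	"fmt"
    	"net/http"
    	"time"
    )

    // probe performs the same kind of check as an HTTP liveness probe:
    // GET the endpoint and report failure on a connection error (like
    // the "connection refused" above) or an unexpected status code.
    func probe(url string) error {
    	client := &http.Client{Timeout: time.Second}
    	resp, err := client.Get(url)
    	if err != nil {
    		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
    		return fmt.Errorf("unexpected status %d", resp.StatusCode)
    	}
    	return nil
    }

    func main() {
    	if err := probe("http://127.0.0.1:8798/health"); err != nil {
    		fmt.Println("Probe failed:", err)
    	}
    }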
"Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2knv4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-8bnj2_openstack-operators(4d401ff5-aff3-4d1c-9fae-d56e1fea07f6): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.377505 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" podUID="4d401ff5-aff3-4d1c-9fae-d56e1fea07f6" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.411650 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" podUID="57034fef-009f-4593-92cb-67cdde94f9e0" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.411727 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" podUID="fc37cd76-791d-468a-9470-da5138c96d34" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.411770 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" podUID="fd510a47-813b-4ec1-953b-cfefa2fb890f" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.411805 4902 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" podUID="eb55d3d7-65ac-4f76-9cb1-210cff99a5bd" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.424289 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.424466 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vmjsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-87z5d_openstack-operators(a056d211-44a9-4585-bbf0-fc0413a57099): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 14:33:06 crc kubenswrapper[4902]: E1202 14:33:06.426757 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" podUID="a056d211-44a9-4585-bbf0-fc0413a57099" Dec 02 14:33:07 crc kubenswrapper[4902]: E1202 14:33:07.199904 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" podUID="0cbcce3d-353f-44fc-896e-75c3a8b58c21" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.428971 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" 
event={"ID":"0cbcce3d-353f-44fc-896e-75c3a8b58c21","Type":"ContainerStarted","Data":"5ab7317c9083b40ff810664c6f986895560e8fb331a0552f07ca9c6a69bbf05b"} Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.429694 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:33:07 crc kubenswrapper[4902]: E1202 14:33:07.430418 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" podUID="0cbcce3d-353f-44fc-896e-75c3a8b58c21" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.433405 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" event={"ID":"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c","Type":"ContainerStarted","Data":"32ae15b5ee5074b3acabb613c428cdbc0b00a848d1bd3ef9932fc75a37b79c0d"} Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.556333 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.584609 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05625664-e0a8-4c8c-904e-69e4b3b7df9b-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r\" (UID: \"05625664-e0a8-4c8c-904e-69e4b3b7df9b\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.814717 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.961932 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.962037 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.967480 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-metrics-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:07 crc kubenswrapper[4902]: I1202 14:33:07.967880 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d319072a-bef8-4511-a876-dc7c6e59817e-webhook-certs\") pod \"openstack-operator-controller-manager-575d4674bc-b4fl5\" (UID: \"d319072a-bef8-4511-a876-dc7c6e59817e\") " pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:08 crc kubenswrapper[4902]: I1202 14:33:08.078755 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:08 crc kubenswrapper[4902]: E1202 14:33:08.440667 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" podUID="0cbcce3d-353f-44fc-896e-75c3a8b58c21" Dec 02 14:33:09 crc kubenswrapper[4902]: I1202 14:33:09.533301 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r"] Dec 02 14:33:09 crc kubenswrapper[4902]: I1202 14:33:09.619699 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5"] Dec 02 14:33:09 crc kubenswrapper[4902]: W1202 14:33:09.933968 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05625664_e0a8_4c8c_904e_69e4b3b7df9b.slice/crio-ecec26ec3f66513e7d2abd19f63375c3aee65d85cc4796d12335ad61fdd4983f WatchSource:0}: Error finding container ecec26ec3f66513e7d2abd19f63375c3aee65d85cc4796d12335ad61fdd4983f: Status 404 returned error can't find the container with id ecec26ec3f66513e7d2abd19f63375c3aee65d85cc4796d12335ad61fdd4983f Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.477670 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" event={"ID":"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6","Type":"ContainerStarted","Data":"be83ff151509d38a0fec59770e23cef9bc1512164cd6af028f34d2a45603a656"} Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.488775 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" event={"ID":"d319072a-bef8-4511-a876-dc7c6e59817e","Type":"ContainerStarted","Data":"88b3ca74fc96a63e2bb875d6daba52ced45815ca8d7cfd9066efbd8067abde03"} Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.488842 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" event={"ID":"d319072a-bef8-4511-a876-dc7c6e59817e","Type":"ContainerStarted","Data":"7da5838218b0b0ed4175d432e6e73184bebc5658c734c4e576902447553a048e"} Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.488913 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.494447 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" event={"ID":"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf","Type":"ContainerStarted","Data":"82ac00e6aaede0d76b4d955b7e1269574ba10c0274676088f47c359b4aef9a04"} Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.518135 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" podStartSLOduration=35.518120812 podStartE2EDuration="35.518120812s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-02 14:33:10.516356221 +0000 UTC m=+1021.707664930" watchObservedRunningTime="2025-12-02 14:33:10.518120812 +0000 UTC m=+1021.709429521" Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.522765 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" event={"ID":"05625664-e0a8-4c8c-904e-69e4b3b7df9b","Type":"ContainerStarted","Data":"ecec26ec3f66513e7d2abd19f63375c3aee65d85cc4796d12335ad61fdd4983f"} Dec 02 14:33:10 crc kubenswrapper[4902]: I1202 14:33:10.527826 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" event={"ID":"a056d211-44a9-4585-bbf0-fc0413a57099","Type":"ContainerStarted","Data":"970db266ac1406d476454193aad6cf42f7dd8b283419f69ae4311c7129144152"} Dec 02 14:33:11 crc kubenswrapper[4902]: E1202 14:33:11.083542 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podUID="437c097d-1c14-4668-a3c6-86802ed4a253" Dec 02 14:33:11 crc kubenswrapper[4902]: E1202 14:33:11.262895 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" podUID="5603e150-66d0-4016-a762-fd636f211c11" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.539608 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" event={"ID":"9c65e1ba-dd1f-4d63-ae34-c18525c76bcf","Type":"ContainerStarted","Data":"af6e97a0d69a293cdbc0dd6ffeb1fb6d8b3c6218c617f0661d1a3ae7b194243e"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.540452 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.554463 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" event={"ID":"9f4cfa25-c827-4b4c-ab57-8dc0221d30b3","Type":"ContainerStarted","Data":"aab7b4ae71fc3f92b99d9be76b71954d53c49b0598b011236f68b268b845dda4"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.554683 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.556318 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" event={"ID":"4d401ff5-aff3-4d1c-9fae-d56e1fea07f6","Type":"ContainerStarted","Data":"0f99e6b5f58464dbf2fcbe124c2a9718b4092f07b85753672af8b47086ec4024"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.556476 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.558111 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" Dec 02 14:33:11 crc 
kubenswrapper[4902]: I1202 14:33:11.567103 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" event={"ID":"605097d2-e1a6-481d-91e2-fd3b638ad7b1","Type":"ContainerStarted","Data":"faa56cde0894d595db72876ba3520d64bfba138d2439eaaadfd3ea3c9f394af6"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.567360 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.569325 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.572335 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" event={"ID":"a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c","Type":"ContainerStarted","Data":"4f518d07daea2026b449e572222fe01e33ea827e48238b255722fe7a901d8214"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.572528 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.575150 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" event={"ID":"0d72b6fb-7ab2-4a48-bdca-17f2794daf3e","Type":"ContainerStarted","Data":"6ea3be1d9b2edd142c47c02fb8096ba3af84e888d9e815a351715dfcf4a5afc8"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.575441 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.577606 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.578233 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" event={"ID":"3838a55d-af10-4de8-ad85-bdbd8e49ed62","Type":"ContainerStarted","Data":"d0902bd1405c2caf843e87a3c5897dd1b155545f15d36ef4c886bdf7c3a1fff7"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.578900 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.580896 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" event={"ID":"16c02339-b964-4c10-8beb-32402af37c34","Type":"ContainerStarted","Data":"75605f329d62c8e6039f69954b4c2a95fe5b35bb4c1679cf3794eaaf6e7a3923"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.581235 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.581391 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.581950 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.583228 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" event={"ID":"a056d211-44a9-4585-bbf0-fc0413a57099","Type":"ContainerStarted","Data":"5ce504c0c07fa8b42ae86414af32b37ece529bb130c4ed765985ac89f5d0475a"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.583371 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.583438 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.586653 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" event={"ID":"dd8c8a81-b5af-4a0c-8640-f4455c09abc1","Type":"ContainerStarted","Data":"734d2bf64450af10a38c6262e38f5916ecdac40a5c26392cee10ff6245976cd5"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.586870 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.589145 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" event={"ID":"437c097d-1c14-4668-a3c6-86802ed4a253","Type":"ContainerStarted","Data":"182e43982d854a1903a40983ddfb180246ce375ba6ea29e4eb87530cb4211b13"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.589640 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" Dec 02 14:33:11 crc kubenswrapper[4902]: E1202 14:33:11.592152 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podUID="437c097d-1c14-4668-a3c6-86802ed4a253" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.592691 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" podStartSLOduration=31.783670599 podStartE2EDuration="36.592678479s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:33:04.47188186 +0000 UTC m=+1015.663190569" lastFinishedPulling="2025-12-02 14:33:09.28088974 +0000 UTC m=+1020.472198449" observedRunningTime="2025-12-02 14:33:11.563009876 +0000 UTC m=+1022.754318585" watchObservedRunningTime="2025-12-02 14:33:11.592678479 +0000 UTC m=+1022.783987188" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.596267 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" podStartSLOduration=4.266020522 podStartE2EDuration="36.596259891s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.857717509 +0000 UTC m=+988.049026218" 
lastFinishedPulling="2025-12-02 14:33:09.187956868 +0000 UTC m=+1020.379265587" observedRunningTime="2025-12-02 14:33:11.590915839 +0000 UTC m=+1022.782224548" watchObservedRunningTime="2025-12-02 14:33:11.596259891 +0000 UTC m=+1022.787568600" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.599809 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" event={"ID":"3c045445-0a69-416e-a9c6-868843e4c3e9","Type":"ContainerStarted","Data":"71bf54d590c0131855d67d2bb20deb69980262e487fe878b021a252a8df0e43a"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.600718 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.603212 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.603409 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" event={"ID":"5603e150-66d0-4016-a762-fd636f211c11","Type":"ContainerStarted","Data":"ef9935ec9ea2c3fa167a400513fc69c122dbfd02e7bd393ccf67d62946536362"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.614086 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5w9nk" podStartSLOduration=3.305320101 podStartE2EDuration="36.614070967s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.936912921 +0000 UTC m=+988.128221630" lastFinishedPulling="2025-12-02 14:33:10.245663787 +0000 UTC m=+1021.436972496" observedRunningTime="2025-12-02 14:33:11.60994216 +0000 UTC m=+1022.801250869" watchObservedRunningTime="2025-12-02 14:33:11.614070967 +0000 UTC m=+1022.805379666" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.619400 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" event={"ID":"4ec2dc11-e22a-40c1-926b-d987e05b8d17","Type":"ContainerStarted","Data":"d72fe95e3904348de4f5faf5d12cb3274d2242b89843c8c38393d2be3d6b4701"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.620417 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.626757 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.640686 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-tjm2s" podStartSLOduration=3.486575855 podStartE2EDuration="36.640669044s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.333973578 +0000 UTC m=+988.525282287" lastFinishedPulling="2025-12-02 14:33:10.488066767 +0000 UTC m=+1021.679375476" observedRunningTime="2025-12-02 14:33:11.640041486 +0000 UTC m=+1022.831350195" watchObservedRunningTime="2025-12-02 14:33:11.640669044 +0000 UTC m=+1022.831977743" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.656045 4902 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" event={"ID":"ebf2d80f-02cb-4f53-a5b1-67280d3cd74b","Type":"ContainerStarted","Data":"ce0ad14fa6e4c3adbe56c43fcbe54957826815c1ae6f4b54b1a3f3a90faf19ea"} Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.704923 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-wdt4v" podStartSLOduration=3.4104533200000002 podStartE2EDuration="36.70490397s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.911070776 +0000 UTC m=+988.102379485" lastFinishedPulling="2025-12-02 14:33:10.205521426 +0000 UTC m=+1021.396830135" observedRunningTime="2025-12-02 14:33:11.703780768 +0000 UTC m=+1022.895089477" watchObservedRunningTime="2025-12-02 14:33:11.70490397 +0000 UTC m=+1022.896212679" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.773691 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" podStartSLOduration=5.007539443 podStartE2EDuration="36.773672435s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.444506821 +0000 UTC m=+988.635815530" lastFinishedPulling="2025-12-02 14:33:09.210639813 +0000 UTC m=+1020.401948522" observedRunningTime="2025-12-02 14:33:11.772167772 +0000 UTC m=+1022.963476481" watchObservedRunningTime="2025-12-02 14:33:11.773672435 +0000 UTC m=+1022.964981144" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.803167 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-gnl4x" podStartSLOduration=3.719327521 podStartE2EDuration="36.803151153s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.072981399 +0000 UTC m=+988.264290108" lastFinishedPulling="2025-12-02 14:33:10.156805031 +0000 UTC m=+1021.348113740" observedRunningTime="2025-12-02 14:33:11.797275706 +0000 UTC m=+1022.988584415" watchObservedRunningTime="2025-12-02 14:33:11.803151153 +0000 UTC m=+1022.994459862" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.864133 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q688c" podStartSLOduration=3.501226101 podStartE2EDuration="36.864111566s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.819108052 +0000 UTC m=+988.010416761" lastFinishedPulling="2025-12-02 14:33:10.181993517 +0000 UTC m=+1021.373302226" observedRunningTime="2025-12-02 14:33:11.828959026 +0000 UTC m=+1023.020267735" watchObservedRunningTime="2025-12-02 14:33:11.864111566 +0000 UTC m=+1023.055420275" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.869306 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pnphg" podStartSLOduration=4.059067209 podStartE2EDuration="36.869285063s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.334557045 +0000 UTC m=+988.525865754" lastFinishedPulling="2025-12-02 14:33:10.144774899 +0000 UTC m=+1021.336083608" observedRunningTime="2025-12-02 14:33:11.859652249 +0000 UTC m=+1023.050960948" watchObservedRunningTime="2025-12-02 14:33:11.869285063 +0000 UTC m=+1023.060593772" 
Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.908607 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhdgs" podStartSLOduration=4.06051034 podStartE2EDuration="36.90858532s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.321147024 +0000 UTC m=+988.512455733" lastFinishedPulling="2025-12-02 14:33:10.169222004 +0000 UTC m=+1021.360530713" observedRunningTime="2025-12-02 14:33:11.885839123 +0000 UTC m=+1023.077147832" watchObservedRunningTime="2025-12-02 14:33:11.90858532 +0000 UTC m=+1023.099894029" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.959138 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-j55rx" podStartSLOduration=2.888967086 podStartE2EDuration="36.959119847s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.305256924 +0000 UTC m=+987.496565633" lastFinishedPulling="2025-12-02 14:33:10.375409685 +0000 UTC m=+1021.566718394" observedRunningTime="2025-12-02 14:33:11.934997621 +0000 UTC m=+1023.126306330" watchObservedRunningTime="2025-12-02 14:33:11.959119847 +0000 UTC m=+1023.150428556" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.969494 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-9nw45" podStartSLOduration=3.348232511 podStartE2EDuration="36.969469861s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.546724888 +0000 UTC m=+987.738033597" lastFinishedPulling="2025-12-02 14:33:10.167962238 +0000 UTC m=+1021.359270947" observedRunningTime="2025-12-02 14:33:11.959523008 +0000 UTC m=+1023.150831737" watchObservedRunningTime="2025-12-02 14:33:11.969469861 +0000 UTC m=+1023.160778570" Dec 02 14:33:11 crc kubenswrapper[4902]: I1202 14:33:11.977090 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" podStartSLOduration=3.029376156 podStartE2EDuration="36.977071167s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:36.212799935 +0000 UTC m=+987.404108644" lastFinishedPulling="2025-12-02 14:33:10.160494946 +0000 UTC m=+1021.351803655" observedRunningTime="2025-12-02 14:33:11.976040868 +0000 UTC m=+1023.167349577" watchObservedRunningTime="2025-12-02 14:33:11.977071167 +0000 UTC m=+1023.168379876" Dec 02 14:33:12 crc kubenswrapper[4902]: I1202 14:33:12.670325 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" event={"ID":"5603e150-66d0-4016-a762-fd636f211c11","Type":"ContainerStarted","Data":"b3740eb18f2ff100dfd71b77990d8ee417071b96d48135b49fe6b0b82bca9f34"} Dec 02 14:33:12 crc kubenswrapper[4902]: I1202 14:33:12.671870 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" Dec 02 14:33:12 crc kubenswrapper[4902]: I1202 14:33:12.673940 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-jgk7w" Dec 02 14:33:12 crc kubenswrapper[4902]: I1202 14:33:12.692399 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" podStartSLOduration=2.674917971 podStartE2EDuration="37.692379602s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.290787461 +0000 UTC m=+988.482096170" lastFinishedPulling="2025-12-02 14:33:12.308249092 +0000 UTC m=+1023.499557801" observedRunningTime="2025-12-02 14:33:12.687068841 +0000 UTC m=+1023.878377560" watchObservedRunningTime="2025-12-02 14:33:12.692379602 +0000 UTC m=+1023.883688311" Dec 02 14:33:13 crc kubenswrapper[4902]: I1202 14:33:13.679308 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" event={"ID":"05625664-e0a8-4c8c-904e-69e4b3b7df9b","Type":"ContainerStarted","Data":"74ab16fc8d7d2f72bf1e87501590d5bfdf271537fb48cc94798904d80f845506"} Dec 02 14:33:13 crc kubenswrapper[4902]: I1202 14:33:13.680602 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" Dec 02 14:33:13 crc kubenswrapper[4902]: I1202 14:33:13.680641 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" event={"ID":"05625664-e0a8-4c8c-904e-69e4b3b7df9b","Type":"ContainerStarted","Data":"0e238da459566c9ce6b70042a27648632bbc10d7433772eb2a2749cd705ee162"} Dec 02 14:33:13 crc kubenswrapper[4902]: I1202 14:33:13.718428 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" podStartSLOduration=35.443184672 podStartE2EDuration="38.71840326s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:33:09.935761997 +0000 UTC m=+1021.127070706" lastFinishedPulling="2025-12-02 14:33:13.210980585 +0000 UTC m=+1024.402289294" observedRunningTime="2025-12-02 14:33:13.710515805 +0000 UTC m=+1024.901824514" watchObservedRunningTime="2025-12-02 14:33:13.71840326 +0000 UTC m=+1024.909711969" Dec 02 14:33:14 crc kubenswrapper[4902]: I1202 14:33:14.688859 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:33:15 crc kubenswrapper[4902]: I1202 14:33:15.512990 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" Dec 02 14:33:15 crc kubenswrapper[4902]: I1202 14:33:15.742548 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8bnj2" Dec 02 14:33:15 crc kubenswrapper[4902]: I1202 14:33:15.765594 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" Dec 02 14:33:15 crc kubenswrapper[4902]: I1202 14:33:15.883230 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" Dec 02 14:33:15 crc kubenswrapper[4902]: I1202 14:33:15.949062 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-87z5d" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.066211 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" Dec 02 14:33:16 crc kubenswrapper[4902]: E1202 14:33:16.107297 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podUID="a4650041-03e0-4119-b353-281b9200355c" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.188925 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.710156 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" event={"ID":"eb55d3d7-65ac-4f76-9cb1-210cff99a5bd","Type":"ContainerStarted","Data":"f9fbc62c25deeab521a6be01eb66eac786f2ad112a9e915e1d1b8b14fa6eca29"} Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.711905 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" event={"ID":"0cbcce3d-353f-44fc-896e-75c3a8b58c21","Type":"ContainerStarted","Data":"582bfefbc54c23d88eadab946cbf59ac81012ada55d3a40635248e44937f6ff3"} Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.713810 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" event={"ID":"fc37cd76-791d-468a-9470-da5138c96d34","Type":"ContainerStarted","Data":"e426edba371923b1e592d1cafa95d06f8bc6a9889ee1dfa6d1bbb87ec06c968f"} Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.715367 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" event={"ID":"fd510a47-813b-4ec1-953b-cfefa2fb890f","Type":"ContainerStarted","Data":"ec6e7314cae7d69d7e762bbde669c3f15c08b2ad48f698f60b65b957cb8c842e"} Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.716998 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" event={"ID":"57034fef-009f-4593-92cb-67cdde94f9e0","Type":"ContainerStarted","Data":"b015c3af4a2f36bd0a0bb7e3b70fe14d476232e256b2dd22e2ed32c080740f2c"} Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.764543 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-64fcb99cdb-nkttd" podStartSLOduration=12.026093735 podStartE2EDuration="41.764521014s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.30272882 +0000 UTC m=+988.494037529" lastFinishedPulling="2025-12-02 14:33:07.041156099 +0000 UTC m=+1018.232464808" observedRunningTime="2025-12-02 14:33:16.762522978 +0000 UTC m=+1027.953831687" watchObservedRunningTime="2025-12-02 14:33:16.764521014 +0000 UTC m=+1027.955829723" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.766842 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-v8th7" podStartSLOduration=24.506064364 podStartE2EDuration="41.76683022s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" 
firstStartedPulling="2025-12-02 14:32:36.866859339 +0000 UTC m=+988.058168048" lastFinishedPulling="2025-12-02 14:32:54.127625195 +0000 UTC m=+1005.318933904" observedRunningTime="2025-12-02 14:33:16.744536736 +0000 UTC m=+1027.935845455" watchObservedRunningTime="2025-12-02 14:33:16.76683022 +0000 UTC m=+1027.958138929" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.796686 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-xvrql" podStartSLOduration=24.688810159 podStartE2EDuration="41.796663768s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.021940308 +0000 UTC m=+988.213249017" lastFinishedPulling="2025-12-02 14:32:54.129793917 +0000 UTC m=+1005.321102626" observedRunningTime="2025-12-02 14:33:16.791142531 +0000 UTC m=+1027.982451270" watchObservedRunningTime="2025-12-02 14:33:16.796663768 +0000 UTC m=+1027.987972497" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.826768 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-qh5qm" podStartSLOduration=16.99394677 podStartE2EDuration="41.826748003s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.302433162 +0000 UTC m=+988.493741871" lastFinishedPulling="2025-12-02 14:33:02.135234375 +0000 UTC m=+1013.326543104" observedRunningTime="2025-12-02 14:33:16.823361187 +0000 UTC m=+1028.014669916" watchObservedRunningTime="2025-12-02 14:33:16.826748003 +0000 UTC m=+1028.018056712" Dec 02 14:33:16 crc kubenswrapper[4902]: I1202 14:33:16.849141 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cdphg" podStartSLOduration=16.990870473 podStartE2EDuration="41.849117099s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.278964445 +0000 UTC m=+988.470273144" lastFinishedPulling="2025-12-02 14:33:02.137211041 +0000 UTC m=+1013.328519770" observedRunningTime="2025-12-02 14:33:16.846237417 +0000 UTC m=+1028.037546136" watchObservedRunningTime="2025-12-02 14:33:16.849117099 +0000 UTC m=+1028.040425808" Dec 02 14:33:18 crc kubenswrapper[4902]: I1202 14:33:18.090314 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-575d4674bc-b4fl5" Dec 02 14:33:21 crc kubenswrapper[4902]: I1202 14:33:21.538898 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" Dec 02 14:33:25 crc kubenswrapper[4902]: I1202 14:33:25.924734 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-nhklg" Dec 02 14:33:27 crc kubenswrapper[4902]: I1202 14:33:27.826691 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r" Dec 02 14:33:29 crc kubenswrapper[4902]: I1202 14:33:29.837975 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" event={"ID":"437c097d-1c14-4668-a3c6-86802ed4a253","Type":"ContainerStarted","Data":"19a417c67bf728d896395f18a1ada3e18ef425e4a16741ed7a46dadbe9695531"} Dec 02 14:33:29 crc 
kubenswrapper[4902]: I1202 14:33:29.838685 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:33:29 crc kubenswrapper[4902]: I1202 14:33:29.862016 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" podStartSLOduration=3.447040874 podStartE2EDuration="54.861982996s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.334352089 +0000 UTC m=+988.525660798" lastFinishedPulling="2025-12-02 14:33:28.749294191 +0000 UTC m=+1039.940602920" observedRunningTime="2025-12-02 14:33:29.861470172 +0000 UTC m=+1041.052778871" watchObservedRunningTime="2025-12-02 14:33:29.861982996 +0000 UTC m=+1041.053291745" Dec 02 14:33:32 crc kubenswrapper[4902]: I1202 14:33:32.863699 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" event={"ID":"a4650041-03e0-4119-b353-281b9200355c","Type":"ContainerStarted","Data":"c3dffac178a7f5d6eb3a68ba66964e08b53f27f737d9b4b5636e9814fdff7a4c"} Dec 02 14:33:32 crc kubenswrapper[4902]: I1202 14:33:32.888638 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gnd6s" podStartSLOduration=2.738140986 podStartE2EDuration="57.888617089s" podCreationTimestamp="2025-12-02 14:32:35 +0000 UTC" firstStartedPulling="2025-12-02 14:32:37.450749508 +0000 UTC m=+988.642058217" lastFinishedPulling="2025-12-02 14:33:32.601225611 +0000 UTC m=+1043.792534320" observedRunningTime="2025-12-02 14:33:32.88828306 +0000 UTC m=+1044.079591779" watchObservedRunningTime="2025-12-02 14:33:32.888617089 +0000 UTC m=+1044.079925798" Dec 02 14:33:34 crc kubenswrapper[4902]: I1202 14:33:34.731778 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:33:34 crc kubenswrapper[4902]: I1202 14:33:34.732258 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:33:36 crc kubenswrapper[4902]: I1202 14:33:36.004410 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-ghmml" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.534841 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.536446 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.540609 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.540832 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.541110 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.541466 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-bncb8" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.545886 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.574629 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.576115 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.583073 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.585311 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.619495 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.619573 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.619595 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.619759 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw4n8\" (UniqueName: \"kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.619794 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxw7f\" (UniqueName: \"kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " 
pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.720725 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw4n8\" (UniqueName: \"kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.720774 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxw7f\" (UniqueName: \"kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.720814 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.720874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.720897 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.722018 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.722025 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.722203 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.744707 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw4n8\" (UniqueName: \"kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8\") pod \"dnsmasq-dns-78dd6ddcc-rp9kr\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.751054 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sxw7f\" (UniqueName: \"kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f\") pod \"dnsmasq-dns-675f4bcbfc-tstt5\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.865206 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:33:55 crc kubenswrapper[4902]: I1202 14:33:55.904047 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:33:56 crc kubenswrapper[4902]: I1202 14:33:56.333256 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:33:56 crc kubenswrapper[4902]: I1202 14:33:56.416163 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:33:56 crc kubenswrapper[4902]: W1202 14:33:56.417968 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42e4352f_2bf5_4299_9598_47d78a1b4cca.slice/crio-0fdb380e6bfc59a979ed170007f8be3bbb90208bdf66443540c0cf7378475bd7 WatchSource:0}: Error finding container 0fdb380e6bfc59a979ed170007f8be3bbb90208bdf66443540c0cf7378475bd7: Status 404 returned error can't find the container with id 0fdb380e6bfc59a979ed170007f8be3bbb90208bdf66443540c0cf7378475bd7 Dec 02 14:33:57 crc kubenswrapper[4902]: I1202 14:33:57.087740 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" event={"ID":"d4144864-a3cf-4a2d-9a58-c89af72d6002","Type":"ContainerStarted","Data":"a7a93d6ebcd86714989a5838967d385a02aa4cb1b31b702decb86dd1cc20e922"} Dec 02 14:33:57 crc kubenswrapper[4902]: I1202 14:33:57.089068 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" event={"ID":"42e4352f-2bf5-4299-9598-47d78a1b4cca","Type":"ContainerStarted","Data":"0fdb380e6bfc59a979ed170007f8be3bbb90208bdf66443540c0cf7378475bd7"} Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.547418 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.571389 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.572687 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.583803 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.661273 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.661357 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.661388 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bzqq\" (UniqueName: \"kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.771763 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.771842 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bzqq\" (UniqueName: \"kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.771929 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.773211 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.773353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.813412 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bzqq\" (UniqueName: 
\"kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq\") pod \"dnsmasq-dns-666b6646f7-7xk9c\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.861905 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.887301 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.889251 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.894397 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:33:58 crc kubenswrapper[4902]: I1202 14:33:58.899611 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.080299 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xbt4\" (UniqueName: \"kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.080787 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.080829 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.181590 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xbt4\" (UniqueName: \"kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.181691 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.181718 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.182757 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.182977 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.203535 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xbt4\" (UniqueName: \"kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4\") pod \"dnsmasq-dns-57d769cc4f-bf4j8\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.271174 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.451598 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.715032 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.717435 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.719846 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.720332 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.720547 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.720553 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.720803 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.722105 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.722998 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gfvhz" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.725002 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.774712 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:33:59 crc kubenswrapper[4902]: W1202 14:33:59.779002 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07017d27_f49f_4452_bfd7_cbc688d4242a.slice/crio-ce65b18750baff4af328168f6cc372ac1ffbdfbf45218e7b64f3d583f6f8d171 WatchSource:0}: Error finding container 
ce65b18750baff4af328168f6cc372ac1ffbdfbf45218e7b64f3d583f6f8d171: Status 404 returned error can't find the container with id ce65b18750baff4af328168f6cc372ac1ffbdfbf45218e7b64f3d583f6f8d171 Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.893598 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.893658 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.893716 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.893753 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.893937 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894062 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894150 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894249 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894288 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod 
\"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894312 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqdlm\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.894677 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.995891 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.995987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996043 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996079 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996110 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996150 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996172 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqdlm\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996204 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996259 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996293 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996318 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.996795 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.997031 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.997594 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.997966 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.998162 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:33:59 crc kubenswrapper[4902]: I1202 14:33:59.998177 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " 
pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.006856 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.013701 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.021952 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.027698 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqdlm\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.040478 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.083441 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.144480 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.150269 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7gl6j" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.150524 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.150676 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.150985 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.151063 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.151175 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.151369 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.165542 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.167801 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.216743 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" event={"ID":"96185d83-c4f2-4add-a1a4-b3a218127218","Type":"ContainerStarted","Data":"034a59e1cb805e80c4e5f66526485ae21846461ea8c031b5f9381da698eef1da"} Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.224699 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" event={"ID":"07017d27-f49f-4452-bfd7-cbc688d4242a","Type":"ContainerStarted","Data":"ce65b18750baff4af328168f6cc372ac1ffbdfbf45218e7b64f3d583f6f8d171"} Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243418 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243496 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243520 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2n6d\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243549 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243578 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243596 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243628 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243657 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243709 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243729 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.243759 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345043 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345484 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345519 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345543 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2n6d\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345598 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345621 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345639 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345669 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345684 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345724 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.345745 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.346224 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.347126 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.348450 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.348587 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.348892 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.349785 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.352915 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.354930 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.358116 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.359337 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.359930 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.378504 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2n6d\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.386814 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.491030 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.908354 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:34:00 crc kubenswrapper[4902]: W1202 14:34:00.945452 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabeb5bdf_6307_4dc9_9a97_d638817b544c.slice/crio-142648d21c22f775b177d6586f42e10299b7cc92d330f6a129a3472011a9bfea WatchSource:0}: Error finding container 142648d21c22f775b177d6586f42e10299b7cc92d330f6a129a3472011a9bfea: Status 404 returned error can't find the container with id 142648d21c22f775b177d6586f42e10299b7cc92d330f6a129a3472011a9bfea Dec 02 14:34:00 crc kubenswrapper[4902]: I1202 14:34:00.996930 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 14:34:00 crc kubenswrapper[4902]: W1202 14:34:00.999720 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c350efc_48d5_4e5d_acee_399252e1b24a.slice/crio-6f78c0777c78e0d0f8f965a43c346776c8ba3c732b5fed7d15462a83bf7ca25e WatchSource:0}: Error finding container 6f78c0777c78e0d0f8f965a43c346776c8ba3c732b5fed7d15462a83bf7ca25e: Status 404 returned error can't find the container with id 6f78c0777c78e0d0f8f965a43c346776c8ba3c732b5fed7d15462a83bf7ca25e Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.233780 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerStarted","Data":"6f78c0777c78e0d0f8f965a43c346776c8ba3c732b5fed7d15462a83bf7ca25e"} Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.234742 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerStarted","Data":"142648d21c22f775b177d6586f42e10299b7cc92d330f6a129a3472011a9bfea"} Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.444712 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.446169 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.449774 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.449982 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-sggfj" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.450145 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.450302 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.466537 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.466743 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.563964 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564203 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564255 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564287 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kolla-config\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564468 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564517 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-default\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564554 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ctwf4\" (UniqueName: \"kubernetes.io/projected/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kube-api-access-ctwf4\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.564641 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.665865 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.665916 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.665952 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kolla-config\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.666009 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.666030 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-default\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.666046 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctwf4\" (UniqueName: \"kubernetes.io/projected/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kube-api-access-ctwf4\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.666077 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.666096 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.667459 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.668146 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.668420 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-config-data-default\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.668955 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kolla-config\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.670848 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff122a60-3f1a-40cb-b1e6-e871037f63c6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.671506 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.677681 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff122a60-3f1a-40cb-b1e6-e871037f63c6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.689472 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.694996 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctwf4\" (UniqueName: \"kubernetes.io/projected/ff122a60-3f1a-40cb-b1e6-e871037f63c6-kube-api-access-ctwf4\") pod \"openstack-galera-0\" (UID: \"ff122a60-3f1a-40cb-b1e6-e871037f63c6\") " pod="openstack/openstack-galera-0" Dec 02 14:34:01 crc kubenswrapper[4902]: I1202 14:34:01.776187 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.869361 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.871360 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.899417 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-mzxkf" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.899552 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.899822 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.899875 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 02 14:34:02 crc kubenswrapper[4902]: I1202 14:34:02.908537 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005332 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005378 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3525dcb7-da07-49a6-8786-5e046303b028-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005396 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005414 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005429 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005460 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005499 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.005524 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv6xs\" (UniqueName: \"kubernetes.io/projected/3525dcb7-da07-49a6-8786-5e046303b028-kube-api-access-tv6xs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.074193 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.084822 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.090620 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.090727 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-xj8s7" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.090917 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.092875 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.107957 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108009 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv6xs\" (UniqueName: \"kubernetes.io/projected/3525dcb7-da07-49a6-8786-5e046303b028-kube-api-access-tv6xs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108070 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108089 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3525dcb7-da07-49a6-8786-5e046303b028-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 
14:34:03.108104 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108118 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108132 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.108163 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.109422 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.109762 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.111286 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.111786 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3525dcb7-da07-49a6-8786-5e046303b028-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.116119 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3525dcb7-da07-49a6-8786-5e046303b028-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.118339 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.129112 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3525dcb7-da07-49a6-8786-5e046303b028-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.171419 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv6xs\" (UniqueName: \"kubernetes.io/projected/3525dcb7-da07-49a6-8786-5e046303b028-kube-api-access-tv6xs\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.212298 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.212621 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss6rf\" (UniqueName: \"kubernetes.io/projected/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kube-api-access-ss6rf\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.212670 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-config-data\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.212694 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kolla-config\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.212723 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.228779 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"3525dcb7-da07-49a6-8786-5e046303b028\") " pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.315884 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-config-data\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " 
pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.315929 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kolla-config\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.315963 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.315990 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.316046 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss6rf\" (UniqueName: \"kubernetes.io/projected/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kube-api-access-ss6rf\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.316913 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kolla-config\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.317221 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/954f9858-8849-4b60-a1dd-1fddd9aaf65e-config-data\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.320059 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.320800 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/954f9858-8849-4b60-a1dd-1fddd9aaf65e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.340045 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss6rf\" (UniqueName: \"kubernetes.io/projected/954f9858-8849-4b60-a1dd-1fddd9aaf65e-kube-api-access-ss6rf\") pod \"memcached-0\" (UID: \"954f9858-8849-4b60-a1dd-1fddd9aaf65e\") " pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.413940 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 14:34:03 crc kubenswrapper[4902]: I1202 14:34:03.520769 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.733775 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.734180 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.734252 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.735058 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.735128 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33" gracePeriod=600 Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.886502 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.887775 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.889851 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-4xzzc" Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.906421 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:34:04 crc kubenswrapper[4902]: I1202 14:34:04.956639 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7wdf\" (UniqueName: \"kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf\") pod \"kube-state-metrics-0\" (UID: \"9dec59dd-e5ef-40bd-bdac-ba601e08ef96\") " pod="openstack/kube-state-metrics-0" Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.058108 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7wdf\" (UniqueName: \"kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf\") pod \"kube-state-metrics-0\" (UID: \"9dec59dd-e5ef-40bd-bdac-ba601e08ef96\") " pod="openstack/kube-state-metrics-0" Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.099462 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7wdf\" (UniqueName: \"kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf\") pod \"kube-state-metrics-0\" (UID: \"9dec59dd-e5ef-40bd-bdac-ba601e08ef96\") " pod="openstack/kube-state-metrics-0" Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.218161 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.290583 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33" exitCode=0 Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.290631 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33"} Dec 02 14:34:05 crc kubenswrapper[4902]: I1202 14:34:05.290736 4902 scope.go:117] "RemoveContainer" containerID="4d3cfcf826d5c881989febb8098e15f6289e747eef104be9d1459d767e21b0fc" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.278858 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.281491 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.283610 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-k2vng" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.289814 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.290101 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.295473 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.295739 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.297276 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.297844 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384544 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384606 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384646 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqwnr\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384687 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384712 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384744 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384764 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.384815 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.485885 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.485964 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.485994 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486011 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486044 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqwnr\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486076 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " 
pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486108 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486134 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.486689 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.491192 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.492355 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.492475 4902 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.492503 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/665a64dfccf06cca52387316c6cee605d1c10e0fb3133ba2864d18ab912b518d/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.493947 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.498759 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.505276 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.507504 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqwnr\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.526965 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:06 crc kubenswrapper[4902]: I1202 14:34:06.608967 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.102607 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-67clb"] Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.106991 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.110096 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vml9m" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.111475 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.111979 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.126304 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67clb"] Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.126364 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-dkmxc"] Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.128122 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.171906 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dkmxc"] Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215344 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-scripts\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215385 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51ddf240-5cda-4f89-831f-4a20ce9997ed-scripts\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215402 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-lib\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215419 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215442 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhb9t\" (UniqueName: \"kubernetes.io/projected/51ddf240-5cda-4f89-831f-4a20ce9997ed-kube-api-access-xhb9t\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215460 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-combined-ca-bundle\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " 
pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215498 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-run\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215516 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-etc-ovs\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215544 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-log\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215583 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215598 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-ovn-controller-tls-certs\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215645 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-log-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.215665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkjqw\" (UniqueName: \"kubernetes.io/projected/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-kube-api-access-bkjqw\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317512 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-scripts\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317550 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51ddf240-5cda-4f89-831f-4a20ce9997ed-scripts\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc 
kubenswrapper[4902]: I1202 14:34:08.317585 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-lib\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317600 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317626 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhb9t\" (UniqueName: \"kubernetes.io/projected/51ddf240-5cda-4f89-831f-4a20ce9997ed-kube-api-access-xhb9t\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317650 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-combined-ca-bundle\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317675 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-run\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317723 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-etc-ovs\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317745 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-log\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317768 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-ovn-controller-tls-certs\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317782 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317818 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-log-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.317840 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkjqw\" (UniqueName: \"kubernetes.io/projected/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-kube-api-access-bkjqw\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.318780 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-log\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.318987 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319031 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-etc-ovs\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319073 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-run\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319109 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-log-ovn\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319113 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-var-lib\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319521 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/51ddf240-5cda-4f89-831f-4a20ce9997ed-var-run\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319872 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51ddf240-5cda-4f89-831f-4a20ce9997ed-scripts\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.319884 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-scripts\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.327976 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-combined-ca-bundle\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.331144 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/51ddf240-5cda-4f89-831f-4a20ce9997ed-ovn-controller-tls-certs\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.337818 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhb9t\" (UniqueName: \"kubernetes.io/projected/51ddf240-5cda-4f89-831f-4a20ce9997ed-kube-api-access-xhb9t\") pod \"ovn-controller-67clb\" (UID: \"51ddf240-5cda-4f89-831f-4a20ce9997ed\") " pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.338394 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkjqw\" (UniqueName: \"kubernetes.io/projected/77a6d129-70f2-4c3c-b394-1ed4cfaf5104-kube-api-access-bkjqw\") pod \"ovn-controller-ovs-dkmxc\" (UID: \"77a6d129-70f2-4c3c-b394-1ed4cfaf5104\") " pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.464851 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67clb" Dec 02 14:34:08 crc kubenswrapper[4902]: I1202 14:34:08.471241 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.242919 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.245406 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.247947 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.248227 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.248446 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.248644 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-qrptb" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.250005 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.256003 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.366926 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367077 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367116 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-config\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367172 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367201 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367387 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367477 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.367619 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw774\" (UniqueName: \"kubernetes.io/projected/79db3800-d728-4ace-a34f-37d44c9c4892-kube-api-access-qw774\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.471952 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472023 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472060 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-config\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472093 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472114 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472150 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472214 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.472315 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw774\" (UniqueName: \"kubernetes.io/projected/79db3800-d728-4ace-a34f-37d44c9c4892-kube-api-access-qw774\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 
14:34:10.474963 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.475540 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.475610 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.476102 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79db3800-d728-4ace-a34f-37d44c9c4892-config\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.486864 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.487538 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.491676 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79db3800-d728-4ace-a34f-37d44c9c4892-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.503228 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw774\" (UniqueName: \"kubernetes.io/projected/79db3800-d728-4ace-a34f-37d44c9c4892-kube-api-access-qw774\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
Dec 02 14:34:10 crc kubenswrapper[4902]: I1202 14:34:10.530393 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"79db3800-d728-4ace-a34f-37d44c9c4892\") " pod="openstack/ovsdbserver-nb-0"
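[Annotation] For the local-volume PVs the two-phase split is visible directly: MountVolume.MountDevice resolves the device mount path (the pre-provisioned directory /mnt/openstack/pv12 above), and MountVolume.SetUp then exposes it under the pod's volumes directory, effectively a bind mount for local volumes. A minimal sketch of the path mapping, under the assumption (taken from this log) that local-storageNN-crc maps to /mnt/openstack/pvNN; the helper names are hypothetical.

    package main

    import (
    	"fmt"
    	"path/filepath"
    	"strings"
    )

    // deviceMountPath mirrors MountVolume.MountDevice for a local volume:
    // the "device" is just the pre-provisioned host directory. Assumption
    // from this log: local-storageNN-crc lives at /mnt/openstack/pvNN.
    func deviceMountPath(pv string) string {
    	n := strings.TrimSuffix(strings.TrimPrefix(pv, "local-storage"), "-crc")
    	return filepath.Join("/mnt/openstack", "pv"+n)
    }

    // podVolumePath mirrors MountVolume.SetUp: the global mount is made
    // visible under the pod's volumes directory (a bind mount).
    func podVolumePath(podUID, pv string) string {
    	return filepath.Join("/var/lib/kubelet/pods", podUID,
    		"volumes/kubernetes.io~local-volume", pv)
    }

    func main() {
    	pv := "local-storage12-crc"
    	fmt.Println("device mount path:", deviceMountPath(pv))
    	fmt.Println("pod volume path:", podVolumePath("79db3800-d728-4ace-a34f-37d44c9c4892", pv))
    }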
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.532233 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.534877 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.538955 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.539157 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.539355 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9zdtf" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.545021 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.558427 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615115 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615186 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615224 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmr4b\" (UniqueName: \"kubernetes.io/projected/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-kube-api-access-zmr4b\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615265 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615298 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615375 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " 
pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615420 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.615471 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-config\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.718084 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.718913 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.718978 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmr4b\" (UniqueName: \"kubernetes.io/projected/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-kube-api-access-zmr4b\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719031 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719066 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719208 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719282 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719360 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-config\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719504 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.719729 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.720676 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.721583 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-config\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.724466 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.724847 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.734274 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.735047 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmr4b\" (UniqueName: \"kubernetes.io/projected/2eb37d9d-d6ca-4b36-a153-47d2b417e26b-kube-api-access-zmr4b\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.755868 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"2eb37d9d-d6ca-4b36-a153-47d2b417e26b\") " pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:12 crc kubenswrapper[4902]: I1202 14:34:12.863937 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.013013 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.013799 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6xbt4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-bf4j8_openstack(07017d27-f49f-4452-bfd7-cbc688d4242a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.015106 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.015776 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.016891 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9bzqq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-7xk9c_openstack(96185d83-c4f2-4add-a1a4-b3a218127218): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.018042 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" podUID="96185d83-c4f2-4add-a1a4-b3a218127218" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.076201 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.076810 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mw4n8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-rp9kr_openstack(42e4352f-2bf5-4299-9598-47d78a1b4cca): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.078044 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" podUID="42e4352f-2bf5-4299-9598-47d78a1b4cca" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.178747 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.179280 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.179280 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sxw7f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-tstt5_openstack(d4144864-a3cf-4a2d-9a58-c89af72d6002): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.185978 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" podUID="d4144864-a3cf-4a2d-9a58-c89af72d6002"
Dec 02 14:34:16 crc kubenswrapper[4902]: I1202 14:34:16.433484 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f"}
Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.439383 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a"
Dec 02 14:34:16 crc kubenswrapper[4902]: E1202 14:34:16.439447 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" podUID="96185d83-c4f2-4add-a1a4-b3a218127218"
Dec 02 14:34:16 crc kubenswrapper[4902]: I1202 14:34:16.682657 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 02 14:34:16 crc kubenswrapper[4902]: I1202 14:34:16.692182 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 02 14:34:16 crc kubenswrapper[4902]: I1202 14:34:16.982450 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67clb"]
Dec 02 14:34:16 crc kubenswrapper[4902]: I1202 14:34:16.995496 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr"
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.004554 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.009640 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5"
Dec 02 14:34:17 crc kubenswrapper[4902]: W1202 14:34:17.011688 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff122a60_3f1a_40cb_b1e6_e871037f63c6.slice/crio-ce3508848147ad79636ceafe3aa3f1fa2eb8bb2034e5653cf492e2431174d74b WatchSource:0}: Error finding container ce3508848147ad79636ceafe3aa3f1fa2eb8bb2034e5653cf492e2431174d74b: Status 404 returned error can't find the container with id ce3508848147ad79636ceafe3aa3f1fa2eb8bb2034e5653cf492e2431174d74b
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.030731 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.043750 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.091958 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config\") pod \"d4144864-a3cf-4a2d-9a58-c89af72d6002\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") "
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092114 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxw7f\" (UniqueName: \"kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f\") pod \"d4144864-a3cf-4a2d-9a58-c89af72d6002\" (UID: \"d4144864-a3cf-4a2d-9a58-c89af72d6002\") "
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092168 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc\") pod \"42e4352f-2bf5-4299-9598-47d78a1b4cca\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") "
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092187 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config\") pod \"42e4352f-2bf5-4299-9598-47d78a1b4cca\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") "
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092240 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw4n8\" (UniqueName: \"kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8\") pod \"42e4352f-2bf5-4299-9598-47d78a1b4cca\" (UID: \"42e4352f-2bf5-4299-9598-47d78a1b4cca\") "
Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092691 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42e4352f-2bf5-4299-9598-47d78a1b4cca" (UID: "42e4352f-2bf5-4299-9598-47d78a1b4cca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
"42e4352f-2bf5-4299-9598-47d78a1b4cca" (UID: "42e4352f-2bf5-4299-9598-47d78a1b4cca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092747 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config" (OuterVolumeSpecName: "config") pod "d4144864-a3cf-4a2d-9a58-c89af72d6002" (UID: "d4144864-a3cf-4a2d-9a58-c89af72d6002"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.092783 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config" (OuterVolumeSpecName: "config") pod "42e4352f-2bf5-4299-9598-47d78a1b4cca" (UID: "42e4352f-2bf5-4299-9598-47d78a1b4cca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.096786 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8" (OuterVolumeSpecName: "kube-api-access-mw4n8") pod "42e4352f-2bf5-4299-9598-47d78a1b4cca" (UID: "42e4352f-2bf5-4299-9598-47d78a1b4cca"). InnerVolumeSpecName "kube-api-access-mw4n8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.096818 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f" (OuterVolumeSpecName: "kube-api-access-sxw7f") pod "d4144864-a3cf-4a2d-9a58-c89af72d6002" (UID: "d4144864-a3cf-4a2d-9a58-c89af72d6002"). InnerVolumeSpecName "kube-api-access-sxw7f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.194036 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4144864-a3cf-4a2d-9a58-c89af72d6002-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.194074 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxw7f\" (UniqueName: \"kubernetes.io/projected/d4144864-a3cf-4a2d-9a58-c89af72d6002-kube-api-access-sxw7f\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.194093 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.194109 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42e4352f-2bf5-4299-9598-47d78a1b4cca-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.194125 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw4n8\" (UniqueName: \"kubernetes.io/projected/42e4352f-2bf5-4299-9598-47d78a1b4cca-kube-api-access-mw4n8\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.233290 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dkmxc"] Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.321282 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 14:34:17 crc kubenswrapper[4902]: W1202 14:34:17.354849 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77a6d129_70f2_4c3c_b394_1ed4cfaf5104.slice/crio-c486315fe3774026be86b5a2842eb373a8e77cda2c446a44fe884b779e1fca7e WatchSource:0}: Error finding container c486315fe3774026be86b5a2842eb373a8e77cda2c446a44fe884b779e1fca7e: Status 404 returned error can't find the container with id c486315fe3774026be86b5a2842eb373a8e77cda2c446a44fe884b779e1fca7e Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.446221 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dkmxc" event={"ID":"77a6d129-70f2-4c3c-b394-1ed4cfaf5104","Type":"ContainerStarted","Data":"c486315fe3774026be86b5a2842eb373a8e77cda2c446a44fe884b779e1fca7e"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.448251 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerStarted","Data":"35554987d92791a21de7a4725afbe317607b3998f83a3d373a2c127c30c8e48c"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.449084 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3525dcb7-da07-49a6-8786-5e046303b028","Type":"ContainerStarted","Data":"f95e5944ab2b7629b6d8320b6ff48fcc56890aa08e16df1d1bf5926abec79395"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.450952 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ff122a60-3f1a-40cb-b1e6-e871037f63c6","Type":"ContainerStarted","Data":"ce3508848147ad79636ceafe3aa3f1fa2eb8bb2034e5653cf492e2431174d74b"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.452863 4902 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerStarted","Data":"3033e15d7080893bfc76fe29831d27b48eb5732d47a6cbcd386f840438b6121c"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.454623 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79db3800-d728-4ace-a34f-37d44c9c4892","Type":"ContainerStarted","Data":"9d815677ecafb9faeb4e010576f06d2873638cbe735af5240eef590f7d126abc"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.456104 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" event={"ID":"42e4352f-2bf5-4299-9598-47d78a1b4cca","Type":"ContainerDied","Data":"0fdb380e6bfc59a979ed170007f8be3bbb90208bdf66443540c0cf7378475bd7"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.456170 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rp9kr" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.459127 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"954f9858-8849-4b60-a1dd-1fddd9aaf65e","Type":"ContainerStarted","Data":"030d293937350eb94909261f27437c690b0d2c1e5886e8d3404ea3887b1abe90"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.460363 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" event={"ID":"d4144864-a3cf-4a2d-9a58-c89af72d6002","Type":"ContainerDied","Data":"a7a93d6ebcd86714989a5838967d385a02aa4cb1b31b702decb86dd1cc20e922"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.460415 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tstt5" Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.462393 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9dec59dd-e5ef-40bd-bdac-ba601e08ef96","Type":"ContainerStarted","Data":"544d6114e7db746e72d02dab86eb2db365091ba29e23db521cfabef5c9141f5d"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.464051 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerStarted","Data":"3e655273fe43d2b060992df0598b8087baceb35e6ab8d009013b8020e6022672"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.465429 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67clb" event={"ID":"51ddf240-5cda-4f89-831f-4a20ce9997ed","Type":"ContainerStarted","Data":"6f454313116423d2c3b2e42a36fbbac66f9bf635d0720e65f478c191f8f2b919"} Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.549466 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.562630 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rp9kr"] Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.577750 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:34:17 crc kubenswrapper[4902]: I1202 14:34:17.582748 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tstt5"] Dec 02 14:34:18 crc kubenswrapper[4902]: I1202 14:34:18.051799 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovsdbserver-sb-0"] Dec 02 14:34:18 crc kubenswrapper[4902]: W1202 14:34:18.123279 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eb37d9d_d6ca_4b36_a153_47d2b417e26b.slice/crio-67426bf878b1e5e331b33489f24d5eb7ca3048245c432dc6f894369030f8cd3f WatchSource:0}: Error finding container 67426bf878b1e5e331b33489f24d5eb7ca3048245c432dc6f894369030f8cd3f: Status 404 returned error can't find the container with id 67426bf878b1e5e331b33489f24d5eb7ca3048245c432dc6f894369030f8cd3f Dec 02 14:34:18 crc kubenswrapper[4902]: I1202 14:34:18.478397 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2eb37d9d-d6ca-4b36-a153-47d2b417e26b","Type":"ContainerStarted","Data":"67426bf878b1e5e331b33489f24d5eb7ca3048245c432dc6f894369030f8cd3f"} Dec 02 14:34:19 crc kubenswrapper[4902]: I1202 14:34:19.115859 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42e4352f-2bf5-4299-9598-47d78a1b4cca" path="/var/lib/kubelet/pods/42e4352f-2bf5-4299-9598-47d78a1b4cca/volumes" Dec 02 14:34:19 crc kubenswrapper[4902]: I1202 14:34:19.116229 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4144864-a3cf-4a2d-9a58-c89af72d6002" path="/var/lib/kubelet/pods/d4144864-a3cf-4a2d-9a58-c89af72d6002/volumes" Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.523603 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79db3800-d728-4ace-a34f-37d44c9c4892","Type":"ContainerStarted","Data":"feb3377951205a43c782243ca2f8be8d0f7b68546a320f646f9a85dbe0af44c4"} Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.525999 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2eb37d9d-d6ca-4b36-a153-47d2b417e26b","Type":"ContainerStarted","Data":"7ecb5996a306dae58e00df09f4faeee5f1311536de7edadc8a858622b0019573"} Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.528171 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67clb" event={"ID":"51ddf240-5cda-4f89-831f-4a20ce9997ed","Type":"ContainerStarted","Data":"d92de5c0e0eb9cadd4c9a9b226847d7921590e5063f02813795a7cf3f5618094"} Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.528318 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-67clb" Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.530617 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"954f9858-8849-4b60-a1dd-1fddd9aaf65e","Type":"ContainerStarted","Data":"a95e4dd86d1a67697c85138d4d2fe2601bf72e6ed6b76b13f263e6b7311aab72"} Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.530934 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.532740 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dkmxc" event={"ID":"77a6d129-70f2-4c3c-b394-1ed4cfaf5104","Type":"ContainerStarted","Data":"01087700efad2a22fbae9f1451857a272e7fd1c94e997c76d3bc8a8efb2ed11b"} Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.534524 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9dec59dd-e5ef-40bd-bdac-ba601e08ef96","Type":"ContainerStarted","Data":"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43"} Dec 02 14:34:24 
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.534715 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.536707 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3525dcb7-da07-49a6-8786-5e046303b028","Type":"ContainerStarted","Data":"d765554390137202c383691609227cbb456dcec5b27c0221f7ebc30246a67efb"}
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.538315 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ff122a60-3f1a-40cb-b1e6-e871037f63c6","Type":"ContainerStarted","Data":"bb28dc7cf9562463a0011ce96f48d55cc7cbce73ff09a96a5e79b7862842eb50"}
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.545153 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-67clb" podStartSLOduration=9.636517852 podStartE2EDuration="16.545132332s" podCreationTimestamp="2025-12-02 14:34:08 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.012294355 +0000 UTC m=+1088.203603064" lastFinishedPulling="2025-12-02 14:34:23.920908825 +0000 UTC m=+1095.112217544" observedRunningTime="2025-12-02 14:34:24.544681449 +0000 UTC m=+1095.735990168" watchObservedRunningTime="2025-12-02 14:34:24.545132332 +0000 UTC m=+1095.736441041"
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.617762 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.443051873 podStartE2EDuration="20.617742617s" podCreationTimestamp="2025-12-02 14:34:04 +0000 UTC" firstStartedPulling="2025-12-02 14:34:16.700901967 +0000 UTC m=+1087.892210686" lastFinishedPulling="2025-12-02 14:34:23.875592721 +0000 UTC m=+1095.066901430" observedRunningTime="2025-12-02 14:34:24.614473376 +0000 UTC m=+1095.805782095" watchObservedRunningTime="2025-12-02 14:34:24.617742617 +0000 UTC m=+1095.809051326"
Dec 02 14:34:24 crc kubenswrapper[4902]: I1202 14:34:24.652011 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.499318063 podStartE2EDuration="21.651994303s" podCreationTimestamp="2025-12-02 14:34:03 +0000 UTC" firstStartedPulling="2025-12-02 14:34:16.704150397 +0000 UTC m=+1087.895459106" lastFinishedPulling="2025-12-02 14:34:23.856826607 +0000 UTC m=+1095.048135346" observedRunningTime="2025-12-02 14:34:24.648765343 +0000 UTC m=+1095.840074052" watchObservedRunningTime="2025-12-02 14:34:24.651994303 +0000 UTC m=+1095.843303012"
Dec 02 14:34:24 crc kubenswrapper[4902]: E1202 14:34:24.870416 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77a6d129_70f2_4c3c_b394_1ed4cfaf5104.slice/crio-conmon-01087700efad2a22fbae9f1451857a272e7fd1c94e997c76d3bc8a8efb2ed11b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77a6d129_70f2_4c3c_b394_1ed4cfaf5104.slice/crio-01087700efad2a22fbae9f1451857a272e7fd1c94e997c76d3bc8a8efb2ed11b.scope\": RecentStats: unable to find data in memory cache]"
Dec 02 14:34:25 crc kubenswrapper[4902]: I1202 14:34:25.549419 4902 generic.go:334] "Generic (PLEG): container finished" podID="77a6d129-70f2-4c3c-b394-1ed4cfaf5104" containerID="01087700efad2a22fbae9f1451857a272e7fd1c94e997c76d3bc8a8efb2ed11b" exitCode=0
Dec 02 14:34:25 crc kubenswrapper[4902]: I1202 14:34:25.551643 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dkmxc" event={"ID":"77a6d129-70f2-4c3c-b394-1ed4cfaf5104","Type":"ContainerDied","Data":"01087700efad2a22fbae9f1451857a272e7fd1c94e997c76d3bc8a8efb2ed11b"}
Dec 02 14:34:26 crc kubenswrapper[4902]: I1202 14:34:26.561007 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerStarted","Data":"aff94390a576c9473b54c81e832e792fd589295cff84eb9feebe60201bc82327"}
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.580675 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2eb37d9d-d6ca-4b36-a153-47d2b417e26b","Type":"ContainerStarted","Data":"4dd9c9647cc7149455c40b2f09f1c965ae7936b25d875d5ea9c2248c07e5d656"}
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.583356 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dkmxc" event={"ID":"77a6d129-70f2-4c3c-b394-1ed4cfaf5104","Type":"ContainerStarted","Data":"690def1bc06851913fb3b1adcbd3ac68c5b75e2a95737c91c168bad91720c320"}
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.583467 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dkmxc" event={"ID":"77a6d129-70f2-4c3c-b394-1ed4cfaf5104","Type":"ContainerStarted","Data":"b36a402f63d1fdae4e7bf6f1e9b2d054f3cd3d8c7cb1236c5b15a2c68928427d"}
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.583505 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dkmxc"
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.583535 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dkmxc"
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.587322 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"79db3800-d728-4ace-a34f-37d44c9c4892","Type":"ContainerStarted","Data":"05700d72578f75f6e827a813faed6246d33d07f446e3ff366f69ac73c5778ddd"}
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.603670 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=7.812818089 podStartE2EDuration="16.603651984s" podCreationTimestamp="2025-12-02 14:34:11 +0000 UTC" firstStartedPulling="2025-12-02 14:34:18.125098452 +0000 UTC m=+1089.316407171" lastFinishedPulling="2025-12-02 14:34:26.915932357 +0000 UTC m=+1098.107241066" observedRunningTime="2025-12-02 14:34:27.599402706 +0000 UTC m=+1098.790711425" watchObservedRunningTime="2025-12-02 14:34:27.603651984 +0000 UTC m=+1098.794960703"
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.631319 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.102900923 podStartE2EDuration="18.631297126s" podCreationTimestamp="2025-12-02 14:34:09 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.369790609 +0000 UTC m=+1088.561099318" lastFinishedPulling="2025-12-02 14:34:26.898186822 +0000 UTC m=+1098.089495521" observedRunningTime="2025-12-02 14:34:27.623595441 +0000 UTC m=+1098.814904160" watchObservedRunningTime="2025-12-02 14:34:27.631297126 +0000 UTC m=+1098.822605835"
Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.864502 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.865822 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.915193 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:27 crc kubenswrapper[4902]: I1202 14:34:27.939887 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-dkmxc" podStartSLOduration=13.420568596 podStartE2EDuration="19.939868425s" podCreationTimestamp="2025-12-02 14:34:08 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.358111003 +0000 UTC m=+1088.549419712" lastFinishedPulling="2025-12-02 14:34:23.877410832 +0000 UTC m=+1095.068719541" observedRunningTime="2025-12-02 14:34:27.6461584 +0000 UTC m=+1098.837467109" watchObservedRunningTime="2025-12-02 14:34:27.939868425 +0000 UTC m=+1099.131177134" Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.581503 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.651368 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.761588 4902 generic.go:334] "Generic (PLEG): container finished" podID="3525dcb7-da07-49a6-8786-5e046303b028" containerID="d765554390137202c383691609227cbb456dcec5b27c0221f7ebc30246a67efb" exitCode=0 Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.761693 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3525dcb7-da07-49a6-8786-5e046303b028","Type":"ContainerDied","Data":"d765554390137202c383691609227cbb456dcec5b27c0221f7ebc30246a67efb"} Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.764316 4902 generic.go:334] "Generic (PLEG): container finished" podID="ff122a60-3f1a-40cb-b1e6-e871037f63c6" containerID="bb28dc7cf9562463a0011ce96f48d55cc7cbce73ff09a96a5e79b7862842eb50" exitCode=0 Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.764695 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ff122a60-3f1a-40cb-b1e6-e871037f63c6","Type":"ContainerDied","Data":"bb28dc7cf9562463a0011ce96f48d55cc7cbce73ff09a96a5e79b7862842eb50"} Dec 02 14:34:28 crc kubenswrapper[4902]: I1202 14:34:28.765130 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:29 crc kubenswrapper[4902]: I1202 14:34:29.826676 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 02 14:34:29 crc kubenswrapper[4902]: I1202 14:34:29.846385 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.028708 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.061655 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.063322 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.065718 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.076985 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.139882 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.140041 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.140107 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bpv5\" (UniqueName: \"kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.140152 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.154642 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-68q42"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.158489 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.162242 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.191227 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-68q42"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242074 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovs-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242127 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39354f8-da6e-43a2-9a3d-49e42af19590-config\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242161 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242208 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovn-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242224 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242249 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bpv5\" (UniqueName: \"kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242270 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242319 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtkcm\" (UniqueName: \"kubernetes.io/projected/a39354f8-da6e-43a2-9a3d-49e42af19590-kube-api-access-rtkcm\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " 
pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242348 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-combined-ca-bundle\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.242379 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.243112 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.243177 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.243916 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.268981 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bpv5\" (UniqueName: \"kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5\") pod \"dnsmasq-dns-7fd796d7df-864w4\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.287761 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.327121 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.329477 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.336203 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.336492 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-lp45h" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.336647 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.336775 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.343777 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.345388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovn-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.345615 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtkcm\" (UniqueName: \"kubernetes.io/projected/a39354f8-da6e-43a2-9a3d-49e42af19590-kube-api-access-rtkcm\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.345755 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-combined-ca-bundle\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.345888 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.346002 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovs-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.346101 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39354f8-da6e-43a2-9a3d-49e42af19590-config\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.346505 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovn-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " 
pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.347295 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39354f8-da6e-43a2-9a3d-49e42af19590-config\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.347529 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a39354f8-da6e-43a2-9a3d-49e42af19590-ovs-rundir\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.352600 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.357628 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.357700 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a39354f8-da6e-43a2-9a3d-49e42af19590-combined-ca-bundle\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.361368 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.361465 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.363896 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.375428 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtkcm\" (UniqueName: \"kubernetes.io/projected/a39354f8-da6e-43a2-9a3d-49e42af19590-kube-api-access-rtkcm\") pod \"ovn-controller-metrics-68q42\" (UID: \"a39354f8-da6e-43a2-9a3d-49e42af19590\") " pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.383531 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503394 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503428 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503453 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503471 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-scripts\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503506 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-config\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503534 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv6lc\" (UniqueName: \"kubernetes.io/projected/412330a8-4fac-4700-a5f8-5054af56f44b-kube-api-access-dv6lc\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503575 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503596 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503616 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: 
I1202 14:34:30.503641 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.503753 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-68q42" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.504178 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.504214 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhzjp\" (UniqueName: \"kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606462 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-config\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606523 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv6lc\" (UniqueName: \"kubernetes.io/projected/412330a8-4fac-4700-a5f8-5054af56f44b-kube-api-access-dv6lc\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606574 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606828 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.606937 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 
crc kubenswrapper[4902]: I1202 14:34:30.606998 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhzjp\" (UniqueName: \"kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607026 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607160 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607190 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607254 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607309 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-scripts\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.607450 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.608065 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-config\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.608517 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/412330a8-4fac-4700-a5f8-5054af56f44b-scripts\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.608822 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: 
\"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.610719 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.611429 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.611477 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.620202 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.620848 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.623443 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/412330a8-4fac-4700-a5f8-5054af56f44b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.627852 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhzjp\" (UniqueName: \"kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp\") pod \"dnsmasq-dns-86db49b7ff-ppngm\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.633206 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv6lc\" (UniqueName: \"kubernetes.io/projected/412330a8-4fac-4700-a5f8-5054af56f44b-kube-api-access-dv6lc\") pod \"ovn-northd-0\" (UID: \"412330a8-4fac-4700-a5f8-5054af56f44b\") " pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.722329 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.781656 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" event={"ID":"96185d83-c4f2-4add-a1a4-b3a218127218","Type":"ContainerDied","Data":"034a59e1cb805e80c4e5f66526485ae21846461ea8c031b5f9381da698eef1da"} Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.781747 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7xk9c" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.805937 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.825525 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.917632 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bzqq\" (UniqueName: \"kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq\") pod \"96185d83-c4f2-4add-a1a4-b3a218127218\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.917711 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc\") pod \"96185d83-c4f2-4add-a1a4-b3a218127218\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.917824 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config\") pod \"96185d83-c4f2-4add-a1a4-b3a218127218\" (UID: \"96185d83-c4f2-4add-a1a4-b3a218127218\") " Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.919159 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "96185d83-c4f2-4add-a1a4-b3a218127218" (UID: "96185d83-c4f2-4add-a1a4-b3a218127218"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.919513 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config" (OuterVolumeSpecName: "config") pod "96185d83-c4f2-4add-a1a4-b3a218127218" (UID: "96185d83-c4f2-4add-a1a4-b3a218127218"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.921670 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq" (OuterVolumeSpecName: "kube-api-access-9bzqq") pod "96185d83-c4f2-4add-a1a4-b3a218127218" (UID: "96185d83-c4f2-4add-a1a4-b3a218127218"). InnerVolumeSpecName "kube-api-access-9bzqq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:30 crc kubenswrapper[4902]: I1202 14:34:30.961354 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.022730 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.022773 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bzqq\" (UniqueName: \"kubernetes.io/projected/96185d83-c4f2-4add-a1a4-b3a218127218-kube-api-access-9bzqq\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.022789 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96185d83-c4f2-4add-a1a4-b3a218127218-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.057704 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-68q42"] Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.143714 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.153064 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7xk9c"] Dec 02 14:34:31 crc kubenswrapper[4902]: W1202 14:34:31.345123 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79e9491d_4b96_4c77_b817_78b2287dd0d2.slice/crio-a889afa899bdaf45ef439815e73157ddcea3b9db1651880dac5a958f5f239a74 WatchSource:0}: Error finding container a889afa899bdaf45ef439815e73157ddcea3b9db1651880dac5a958f5f239a74: Status 404 returned error can't find the container with id a889afa899bdaf45ef439815e73157ddcea3b9db1651880dac5a958f5f239a74 Dec 02 14:34:31 crc kubenswrapper[4902]: W1202 14:34:31.347676 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda39354f8_da6e_43a2_9a3d_49e42af19590.slice/crio-a3fc9f9f472b7ef2768479e6a583f9c3ce541bca69254790c86099f2b5c9a801 WatchSource:0}: Error finding container a3fc9f9f472b7ef2768479e6a583f9c3ce541bca69254790c86099f2b5c9a801: Status 404 returned error can't find the container with id a3fc9f9f472b7ef2768479e6a583f9c3ce541bca69254790c86099f2b5c9a801 Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.792096 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" event={"ID":"79e9491d-4b96-4c77-b817-78b2287dd0d2","Type":"ContainerStarted","Data":"a889afa899bdaf45ef439815e73157ddcea3b9db1651880dac5a958f5f239a74"} Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.794256 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-68q42" event={"ID":"a39354f8-da6e-43a2-9a3d-49e42af19590","Type":"ContainerStarted","Data":"a3fc9f9f472b7ef2768479e6a583f9c3ce541bca69254790c86099f2b5c9a801"} Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.849396 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 14:34:31 crc kubenswrapper[4902]: W1202 14:34:31.849519 4902 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod412330a8_4fac_4700_a5f8_5054af56f44b.slice/crio-ff7f080de2eab8039041d632e77a767ee70fd2024067da9d47870474f72f9b95 WatchSource:0}: Error finding container ff7f080de2eab8039041d632e77a767ee70fd2024067da9d47870474f72f9b95: Status 404 returned error can't find the container with id ff7f080de2eab8039041d632e77a767ee70fd2024067da9d47870474f72f9b95 Dec 02 14:34:31 crc kubenswrapper[4902]: I1202 14:34:31.915342 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:31 crc kubenswrapper[4902]: W1202 14:34:31.920635 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f338b3f_0275_42a1_be04_016bd343525c.slice/crio-f30cc23cf2f05323816c6009ddb0cc916a90f6bf4beedbb038bf549668ddb8f8 WatchSource:0}: Error finding container f30cc23cf2f05323816c6009ddb0cc916a90f6bf4beedbb038bf549668ddb8f8: Status 404 returned error can't find the container with id f30cc23cf2f05323816c6009ddb0cc916a90f6bf4beedbb038bf549668ddb8f8 Dec 02 14:34:32 crc kubenswrapper[4902]: I1202 14:34:32.806003 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"412330a8-4fac-4700-a5f8-5054af56f44b","Type":"ContainerStarted","Data":"ff7f080de2eab8039041d632e77a767ee70fd2024067da9d47870474f72f9b95"} Dec 02 14:34:32 crc kubenswrapper[4902]: I1202 14:34:32.807622 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" event={"ID":"5f338b3f-0275-42a1-be04-016bd343525c","Type":"ContainerStarted","Data":"f30cc23cf2f05323816c6009ddb0cc916a90f6bf4beedbb038bf549668ddb8f8"} Dec 02 14:34:33 crc kubenswrapper[4902]: I1202 14:34:33.124813 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96185d83-c4f2-4add-a1a4-b3a218127218" path="/var/lib/kubelet/pods/96185d83-c4f2-4add-a1a4-b3a218127218/volumes" Dec 02 14:34:33 crc kubenswrapper[4902]: I1202 14:34:33.416265 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.168820 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.205130 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.226257 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.253278 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.259857 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.402586 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.402656 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.402694 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.402740 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.402797 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7qdv\" (UniqueName: \"kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.504249 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.504333 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7qdv\" (UniqueName: \"kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.504361 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " 
pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.504389 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.504422 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.505192 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.505406 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.505765 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.506192 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.532524 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7qdv\" (UniqueName: \"kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv\") pod \"dnsmasq-dns-698758b865-sl7jl\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.554004 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:35 crc kubenswrapper[4902]: I1202 14:34:35.984640 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:34:35 crc kubenswrapper[4902]: W1202 14:34:35.988555 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee866db4_a0e7_4aad_8a8f_cd0e81b36c8a.slice/crio-4e46659c2ab8a4fede5b3bc9469ab35752d26a0dd57b4c169cceafff0b21508e WatchSource:0}: Error finding container 4e46659c2ab8a4fede5b3bc9469ab35752d26a0dd57b4c169cceafff0b21508e: Status 404 returned error can't find the container with id 4e46659c2ab8a4fede5b3bc9469ab35752d26a0dd57b4c169cceafff0b21508e Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.281957 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.287811 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.289960 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.290356 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.290534 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-45fwh" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.292078 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.304273 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.418989 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.419127 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-lock\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.419165 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.419191 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-cache\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.419267 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp995\" 
(UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-kube-api-access-fp995\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.520824 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp995\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-kube-api-access-fp995\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.520961 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.520993 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-lock\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.521018 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.521036 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-cache\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: E1202 14:34:36.521215 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:36 crc kubenswrapper[4902]: E1202 14:34:36.521243 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:36 crc kubenswrapper[4902]: E1202 14:34:36.521292 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. No retries permitted until 2025-12-02 14:34:37.021274216 +0000 UTC m=+1108.212582925 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.521422 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.521494 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-cache\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.521536 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/0a523405-44a7-49cc-ae19-25ebbdbc8d73-lock\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.542371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp995\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-kube-api-access-fp995\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.546426 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.779081 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-r6sk4"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.780452 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.782715 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.784324 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.784457 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.836681 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-4x7nc"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.838072 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.846008 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ff122a60-3f1a-40cb-b1e6-e871037f63c6","Type":"ContainerStarted","Data":"1d4be9a339eea114e4633ac9750411d23f50c0b73b9b53423f84dd1dd202d448"} Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.849726 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-sl7jl" event={"ID":"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a","Type":"ContainerStarted","Data":"4e46659c2ab8a4fede5b3bc9469ab35752d26a0dd57b4c169cceafff0b21508e"} Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.852503 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3525dcb7-da07-49a6-8786-5e046303b028","Type":"ContainerStarted","Data":"cf0e1fc2d1f384d487da1750855d87fa182f10d3a8f58ab2d68c4b34dd4049b2"} Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.854086 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-r6sk4"] Dec 02 14:34:36 crc kubenswrapper[4902]: E1202 14:34:36.854648 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-82fct ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-82fct ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-r6sk4" podUID="66e91e78-a202-42d0-b282-5c192e4613f3" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.864885 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-4x7nc"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.883941 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-r6sk4"] Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929245 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929314 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929345 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82fct\" (UniqueName: \"kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929389 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf\") pod \"swift-ring-rebalance-4x7nc\" (UID: 
\"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929442 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929474 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929532 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929604 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929652 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929672 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929709 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929736 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929755 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:36 crc kubenswrapper[4902]: I1202 14:34:36.929784 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swtsm\" (UniqueName: \"kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031201 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swtsm\" (UniqueName: \"kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031260 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031292 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031311 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82fct\" (UniqueName: \"kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031343 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031389 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031416 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031457 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031483 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031499 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031517 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031544 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031588 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.031607 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.032016 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: E1202 14:34:37.032112 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:37 crc kubenswrapper[4902]: E1202 14:34:37.032131 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod 
openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:37 crc kubenswrapper[4902]: E1202 14:34:37.032171 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. No retries permitted until 2025-12-02 14:34:38.03215801 +0000 UTC m=+1109.223466729 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.032350 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.032586 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.032947 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.033045 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.033795 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.036642 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.036826 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.036895 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf\") pod 
\"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.037291 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.050252 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.051814 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swtsm\" (UniqueName: \"kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm\") pod \"swift-ring-rebalance-4x7nc\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.053150 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.053542 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82fct\" (UniqueName: \"kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct\") pod \"swift-ring-rebalance-r6sk4\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.152002 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.605529 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-4x7nc"] Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.861480 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-68q42" event={"ID":"a39354f8-da6e-43a2-9a3d-49e42af19590","Type":"ContainerStarted","Data":"3cf677cd63f37c12e274ea89d6ae1aa299da4eb4e25ec5282e85d552640d33d4"} Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.863380 4902 generic.go:334] "Generic (PLEG): container finished" podID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerID="aff94390a576c9473b54c81e832e792fd589295cff84eb9feebe60201bc82327" exitCode=0 Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.863501 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.863498 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerDied","Data":"aff94390a576c9473b54c81e832e792fd589295cff84eb9feebe60201bc82327"} Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.874912 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.886504 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-68q42" podStartSLOduration=7.8864885860000005 podStartE2EDuration="7.886488586s" podCreationTimestamp="2025-12-02 14:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:34:37.884014667 +0000 UTC m=+1109.075323376" watchObservedRunningTime="2025-12-02 14:34:37.886488586 +0000 UTC m=+1109.077797295" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.937346 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=30.082996568 podStartE2EDuration="36.937330205s" podCreationTimestamp="2025-12-02 14:34:01 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.009483856 +0000 UTC m=+1088.200792565" lastFinishedPulling="2025-12-02 14:34:23.863817453 +0000 UTC m=+1095.055126202" observedRunningTime="2025-12-02 14:34:37.933499658 +0000 UTC m=+1109.124808367" watchObservedRunningTime="2025-12-02 14:34:37.937330205 +0000 UTC m=+1109.128638914" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.939834 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=31.092539155 podStartE2EDuration="37.939828845s" podCreationTimestamp="2025-12-02 14:34:00 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.02254004 +0000 UTC m=+1088.213848749" lastFinishedPulling="2025-12-02 14:34:23.86982972 +0000 UTC m=+1095.061138439" observedRunningTime="2025-12-02 14:34:37.914002724 +0000 UTC m=+1109.105311433" watchObservedRunningTime="2025-12-02 14:34:37.939828845 +0000 UTC m=+1109.131137554" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.947651 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.947788 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948001 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948061 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948122 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift\") pod 
\"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948193 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82fct\" (UniqueName: \"kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948248 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts\") pod \"66e91e78-a202-42d0-b282-5c192e4613f3\" (UID: \"66e91e78-a202-42d0-b282-5c192e4613f3\") " Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948384 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948585 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.948799 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts" (OuterVolumeSpecName: "scripts") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.949983 4902 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.950011 4902 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66e91e78-a202-42d0-b282-5c192e4613f3-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.950021 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e91e78-a202-42d0-b282-5c192e4613f3-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.954697 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.954732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.954789 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct" (OuterVolumeSpecName: "kube-api-access-82fct") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "kube-api-access-82fct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:37 crc kubenswrapper[4902]: I1202 14:34:37.954848 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "66e91e78-a202-42d0-b282-5c192e4613f3" (UID: "66e91e78-a202-42d0-b282-5c192e4613f3"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.051797 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.051933 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.051955 4902 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.051968 4902 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66e91e78-a202-42d0-b282-5c192e4613f3-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.051981 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82fct\" (UniqueName: \"kubernetes.io/projected/66e91e78-a202-42d0-b282-5c192e4613f3-kube-api-access-82fct\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:38 crc kubenswrapper[4902]: E1202 14:34:38.051994 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:38 crc kubenswrapper[4902]: E1202 14:34:38.052014 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:38 crc kubenswrapper[4902]: E1202 14:34:38.052064 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. 
No retries permitted until 2025-12-02 14:34:40.052048855 +0000 UTC m=+1111.243357564 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:38 crc kubenswrapper[4902]: E1202 14:34:38.376331 4902 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:47538->38.102.83.251:38083: write tcp 38.102.83.251:47538->38.102.83.251:38083: write: broken pipe Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.877228 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-4x7nc" event={"ID":"3f809093-c39f-40fe-a785-69a1edd2bdf9","Type":"ContainerStarted","Data":"344b24eef3cee9f002f40414da597ef3ff1068133601421207c71ef9f017d5fe"} Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.877305 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-r6sk4" Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.942901 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-r6sk4"] Dec 02 14:34:38 crc kubenswrapper[4902]: I1202 14:34:38.952633 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-r6sk4"] Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.123071 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66e91e78-a202-42d0-b282-5c192e4613f3" path="/var/lib/kubelet/pods/66e91e78-a202-42d0-b282-5c192e4613f3/volumes" Dec 02 14:34:39 crc kubenswrapper[4902]: E1202 14:34:39.809067 4902 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:47548->38.102.83.251:38083: write tcp 38.102.83.251:47548->38.102.83.251:38083: write: broken pipe Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.887197 4902 generic.go:334] "Generic (PLEG): container finished" podID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerID="e3a8ee5f87c5570cbacf6f2528a8af9027bfaff15fe1940fefec9f0f5d2646fe" exitCode=0 Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.887279 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-sl7jl" event={"ID":"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a","Type":"ContainerDied","Data":"e3a8ee5f87c5570cbacf6f2528a8af9027bfaff15fe1940fefec9f0f5d2646fe"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.894408 4902 generic.go:334] "Generic (PLEG): container finished" podID="5f338b3f-0275-42a1-be04-016bd343525c" containerID="7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85" exitCode=0 Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.894480 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" event={"ID":"5f338b3f-0275-42a1-be04-016bd343525c","Type":"ContainerDied","Data":"7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.897724 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"412330a8-4fac-4700-a5f8-5054af56f44b","Type":"ContainerStarted","Data":"b51bcfadc001995e5c8403b71fe7c69343324e8e053ac2e6b55a2e9ebf164e54"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.897750 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" 
event={"ID":"412330a8-4fac-4700-a5f8-5054af56f44b","Type":"ContainerStarted","Data":"503da08e24830bf5be1ab9d80d80483fd3827a6c2845a4bd4d6629d39ab02c4f"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.898210 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.899580 4902 generic.go:334] "Generic (PLEG): container finished" podID="07017d27-f49f-4452-bfd7-cbc688d4242a" containerID="08d566a950b3856466a2d2eca086026ec8213415e5e4b9d712a903053dca06c0" exitCode=0 Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.899627 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" event={"ID":"07017d27-f49f-4452-bfd7-cbc688d4242a","Type":"ContainerDied","Data":"08d566a950b3856466a2d2eca086026ec8213415e5e4b9d712a903053dca06c0"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.901538 4902 generic.go:334] "Generic (PLEG): container finished" podID="79e9491d-4b96-4c77-b817-78b2287dd0d2" containerID="ba0c98475b9d3ff69757996e0010b4b9f3494691f8b4aac47a370d3203f52d3e" exitCode=0 Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.901585 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" event={"ID":"79e9491d-4b96-4c77-b817-78b2287dd0d2","Type":"ContainerDied","Data":"ba0c98475b9d3ff69757996e0010b4b9f3494691f8b4aac47a370d3203f52d3e"} Dec 02 14:34:39 crc kubenswrapper[4902]: I1202 14:34:39.964088 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.319356163 podStartE2EDuration="9.964069471s" podCreationTimestamp="2025-12-02 14:34:30 +0000 UTC" firstStartedPulling="2025-12-02 14:34:31.8529274 +0000 UTC m=+1103.044236119" lastFinishedPulling="2025-12-02 14:34:38.497640718 +0000 UTC m=+1109.688949427" observedRunningTime="2025-12-02 14:34:39.959274307 +0000 UTC m=+1111.150583016" watchObservedRunningTime="2025-12-02 14:34:39.964069471 +0000 UTC m=+1111.155378180" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.089695 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:40 crc kubenswrapper[4902]: E1202 14:34:40.090032 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:40 crc kubenswrapper[4902]: E1202 14:34:40.090048 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:40 crc kubenswrapper[4902]: E1202 14:34:40.090107 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. No retries permitted until 2025-12-02 14:34:44.090093717 +0000 UTC m=+1115.281402426 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.586237 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.591098 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702465 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc\") pod \"79e9491d-4b96-4c77-b817-78b2287dd0d2\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702576 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config\") pod \"79e9491d-4b96-4c77-b817-78b2287dd0d2\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702625 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bpv5\" (UniqueName: \"kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5\") pod \"79e9491d-4b96-4c77-b817-78b2287dd0d2\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702701 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config\") pod \"07017d27-f49f-4452-bfd7-cbc688d4242a\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702723 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb\") pod \"79e9491d-4b96-4c77-b817-78b2287dd0d2\" (UID: \"79e9491d-4b96-4c77-b817-78b2287dd0d2\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702832 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc\") pod \"07017d27-f49f-4452-bfd7-cbc688d4242a\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.702857 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xbt4\" (UniqueName: \"kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4\") pod \"07017d27-f49f-4452-bfd7-cbc688d4242a\" (UID: \"07017d27-f49f-4452-bfd7-cbc688d4242a\") " Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.707518 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4" (OuterVolumeSpecName: "kube-api-access-6xbt4") pod "07017d27-f49f-4452-bfd7-cbc688d4242a" (UID: "07017d27-f49f-4452-bfd7-cbc688d4242a"). InnerVolumeSpecName "kube-api-access-6xbt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.716132 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5" (OuterVolumeSpecName: "kube-api-access-9bpv5") pod "79e9491d-4b96-4c77-b817-78b2287dd0d2" (UID: "79e9491d-4b96-4c77-b817-78b2287dd0d2"). 
InnerVolumeSpecName "kube-api-access-9bpv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.728737 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config" (OuterVolumeSpecName: "config") pod "79e9491d-4b96-4c77-b817-78b2287dd0d2" (UID: "79e9491d-4b96-4c77-b817-78b2287dd0d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.737510 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "79e9491d-4b96-4c77-b817-78b2287dd0d2" (UID: "79e9491d-4b96-4c77-b817-78b2287dd0d2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.740920 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07017d27-f49f-4452-bfd7-cbc688d4242a" (UID: "07017d27-f49f-4452-bfd7-cbc688d4242a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.742526 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "79e9491d-4b96-4c77-b817-78b2287dd0d2" (UID: "79e9491d-4b96-4c77-b817-78b2287dd0d2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.755666 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config" (OuterVolumeSpecName: "config") pod "07017d27-f49f-4452-bfd7-cbc688d4242a" (UID: "07017d27-f49f-4452-bfd7-cbc688d4242a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804250 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804290 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bpv5\" (UniqueName: \"kubernetes.io/projected/79e9491d-4b96-4c77-b817-78b2287dd0d2-kube-api-access-9bpv5\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804301 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804313 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804327 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07017d27-f49f-4452-bfd7-cbc688d4242a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804338 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xbt4\" (UniqueName: \"kubernetes.io/projected/07017d27-f49f-4452-bfd7-cbc688d4242a-kube-api-access-6xbt4\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.804349 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79e9491d-4b96-4c77-b817-78b2287dd0d2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.919364 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" event={"ID":"79e9491d-4b96-4c77-b817-78b2287dd0d2","Type":"ContainerDied","Data":"a889afa899bdaf45ef439815e73157ddcea3b9db1651880dac5a958f5f239a74"} Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.919410 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-864w4" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.923055 4902 scope.go:117] "RemoveContainer" containerID="ba0c98475b9d3ff69757996e0010b4b9f3494691f8b4aac47a370d3203f52d3e" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.925321 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" event={"ID":"07017d27-f49f-4452-bfd7-cbc688d4242a","Type":"ContainerDied","Data":"ce65b18750baff4af328168f6cc372ac1ffbdfbf45218e7b64f3d583f6f8d171"} Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.925350 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bf4j8" Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.982390 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:40 crc kubenswrapper[4902]: I1202 14:34:40.995401 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-864w4"] Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.022364 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.029015 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bf4j8"] Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.121980 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a" path="/var/lib/kubelet/pods/07017d27-f49f-4452-bfd7-cbc688d4242a/volumes" Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.122517 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79e9491d-4b96-4c77-b817-78b2287dd0d2" path="/var/lib/kubelet/pods/79e9491d-4b96-4c77-b817-78b2287dd0d2/volumes" Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.777756 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 02 14:34:41 crc kubenswrapper[4902]: I1202 14:34:41.778066 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 02 14:34:43 crc kubenswrapper[4902]: I1202 14:34:43.521308 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:43 crc kubenswrapper[4902]: I1202 14:34:43.521602 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:43 crc kubenswrapper[4902]: I1202 14:34:43.596250 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:43 crc kubenswrapper[4902]: I1202 14:34:43.995296 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 02 14:34:44 crc kubenswrapper[4902]: I1202 14:34:44.026780 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 02 14:34:44 crc kubenswrapper[4902]: I1202 14:34:44.094815 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 02 14:34:44 crc kubenswrapper[4902]: I1202 14:34:44.166626 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:44 crc kubenswrapper[4902]: E1202 14:34:44.166855 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:44 crc kubenswrapper[4902]: E1202 14:34:44.166874 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:44 crc kubenswrapper[4902]: E1202 14:34:44.166953 4902 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. No retries permitted until 2025-12-02 14:34:52.166929761 +0000 UTC m=+1123.358238470 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.365801 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-kwmmj"] Dec 02 14:34:45 crc kubenswrapper[4902]: E1202 14:34:45.366457 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79e9491d-4b96-4c77-b817-78b2287dd0d2" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.366473 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="79e9491d-4b96-4c77-b817-78b2287dd0d2" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: E1202 14:34:45.366493 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.366500 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.366746 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="79e9491d-4b96-4c77-b817-78b2287dd0d2" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.366772 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="07017d27-f49f-4452-bfd7-cbc688d4242a" containerName="init" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.367500 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.377302 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-832f-account-create-update-8ns89"] Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.378508 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.387557 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.410325 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-kwmmj"] Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.424595 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-832f-account-create-update-8ns89"] Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.488925 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64ls8\" (UniqueName: \"kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.488986 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.489037 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.489111 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njph5\" (UniqueName: \"kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.591057 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64ls8\" (UniqueName: \"kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.591180 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.591247 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.591452 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njph5\" (UniqueName: \"kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.591977 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.593374 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.612496 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njph5\" (UniqueName: \"kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5\") pod \"watcher-832f-account-create-update-8ns89\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.622186 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64ls8\" (UniqueName: \"kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8\") pod \"watcher-db-create-kwmmj\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.695290 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:45 crc kubenswrapper[4902]: I1202 14:34:45.708630 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.374172 4902 scope.go:117] "RemoveContainer" containerID="08d566a950b3856466a2d2eca086026ec8213415e5e4b9d712a903053dca06c0" Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.872424 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-832f-account-create-update-8ns89"] Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.980474 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-kwmmj"] Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.986414 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-4x7nc" event={"ID":"3f809093-c39f-40fe-a785-69a1edd2bdf9","Type":"ContainerStarted","Data":"ec45adfceb7bf6c0df8aa0c1e7b0ccde5cc75a98df28a3e432b0590a8f3b564f"} Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.990469 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-sl7jl" event={"ID":"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a","Type":"ContainerStarted","Data":"11e3c5ae1feb2872ebb2faa127d97f7d977b2e6386b642120c965de90fa72407"} Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.990606 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.992332 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" event={"ID":"5f338b3f-0275-42a1-be04-016bd343525c","Type":"ContainerStarted","Data":"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f"} Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.992388 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:46 crc kubenswrapper[4902]: I1202 14:34:46.994020 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-832f-account-create-update-8ns89" event={"ID":"20fead8d-5003-473f-b71d-096c2877181d","Type":"ContainerStarted","Data":"1b732f599683455e6508749672e2e1edad58a321c59e16d4e42041fb289d3f45"} Dec 02 14:34:47 crc kubenswrapper[4902]: I1202 14:34:47.000205 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerStarted","Data":"85eeb6d07896470dc279bd1a847bb07e307765bbb655f25a796af1cf61b990be"} Dec 02 14:34:47 crc kubenswrapper[4902]: I1202 14:34:47.014472 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-4x7nc" podStartSLOduration=2.612561363 podStartE2EDuration="11.014452637s" podCreationTimestamp="2025-12-02 14:34:36 +0000 UTC" firstStartedPulling="2025-12-02 14:34:38.028762686 +0000 UTC m=+1109.220071395" lastFinishedPulling="2025-12-02 14:34:46.43065396 +0000 UTC m=+1117.621962669" observedRunningTime="2025-12-02 14:34:47.005853047 +0000 UTC m=+1118.197161756" watchObservedRunningTime="2025-12-02 14:34:47.014452637 +0000 UTC m=+1118.205761346" Dec 02 14:34:47 crc kubenswrapper[4902]: I1202 14:34:47.031994 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" podStartSLOduration=10.45613363 podStartE2EDuration="17.031967676s" podCreationTimestamp="2025-12-02 14:34:30 +0000 UTC" firstStartedPulling="2025-12-02 14:34:31.923866209 +0000 UTC m=+1103.115174918" 
lastFinishedPulling="2025-12-02 14:34:38.499700255 +0000 UTC m=+1109.691008964" observedRunningTime="2025-12-02 14:34:47.026163034 +0000 UTC m=+1118.217471743" watchObservedRunningTime="2025-12-02 14:34:47.031967676 +0000 UTC m=+1118.223276385" Dec 02 14:34:47 crc kubenswrapper[4902]: I1202 14:34:47.042132 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podStartSLOduration=9.517839792 podStartE2EDuration="12.042112819s" podCreationTimestamp="2025-12-02 14:34:35 +0000 UTC" firstStartedPulling="2025-12-02 14:34:35.99092144 +0000 UTC m=+1107.182230149" lastFinishedPulling="2025-12-02 14:34:38.515194477 +0000 UTC m=+1109.706503176" observedRunningTime="2025-12-02 14:34:47.040458293 +0000 UTC m=+1118.231767002" watchObservedRunningTime="2025-12-02 14:34:47.042112819 +0000 UTC m=+1118.233421528" Dec 02 14:34:48 crc kubenswrapper[4902]: I1202 14:34:48.014304 4902 generic.go:334] "Generic (PLEG): container finished" podID="1df2b5da-f651-420f-b497-67bfab58faad" containerID="ab8590788020831da214658e16998074356ce9e4d20a6dbba0527b9cd2ae7cdd" exitCode=0 Dec 02 14:34:48 crc kubenswrapper[4902]: I1202 14:34:48.014427 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-kwmmj" event={"ID":"1df2b5da-f651-420f-b497-67bfab58faad","Type":"ContainerDied","Data":"ab8590788020831da214658e16998074356ce9e4d20a6dbba0527b9cd2ae7cdd"} Dec 02 14:34:48 crc kubenswrapper[4902]: I1202 14:34:48.014781 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-kwmmj" event={"ID":"1df2b5da-f651-420f-b497-67bfab58faad","Type":"ContainerStarted","Data":"3ab7a4525a002a6889ae7c5d98326a87a9fbd01950f6310825fa3ece8884fdc4"} Dec 02 14:34:48 crc kubenswrapper[4902]: I1202 14:34:48.017064 4902 generic.go:334] "Generic (PLEG): container finished" podID="20fead8d-5003-473f-b71d-096c2877181d" containerID="97a4f0b0f3ebfc2371d584e1bc7b312d2336cc0d91fcb33d52e89fc3fe056094" exitCode=0 Dec 02 14:34:48 crc kubenswrapper[4902]: I1202 14:34:48.017192 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-832f-account-create-update-8ns89" event={"ID":"20fead8d-5003-473f-b71d-096c2877181d","Type":"ContainerDied","Data":"97a4f0b0f3ebfc2371d584e1bc7b312d2336cc0d91fcb33d52e89fc3fe056094"} Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.550225 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.556421 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.599754 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64ls8\" (UniqueName: \"kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8\") pod \"1df2b5da-f651-420f-b497-67bfab58faad\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.599886 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts\") pod \"20fead8d-5003-473f-b71d-096c2877181d\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.600059 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts\") pod \"1df2b5da-f651-420f-b497-67bfab58faad\" (UID: \"1df2b5da-f651-420f-b497-67bfab58faad\") " Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.600109 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njph5\" (UniqueName: \"kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5\") pod \"20fead8d-5003-473f-b71d-096c2877181d\" (UID: \"20fead8d-5003-473f-b71d-096c2877181d\") " Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.602037 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "20fead8d-5003-473f-b71d-096c2877181d" (UID: "20fead8d-5003-473f-b71d-096c2877181d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.602170 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1df2b5da-f651-420f-b497-67bfab58faad" (UID: "1df2b5da-f651-420f-b497-67bfab58faad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.612808 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5" (OuterVolumeSpecName: "kube-api-access-njph5") pod "20fead8d-5003-473f-b71d-096c2877181d" (UID: "20fead8d-5003-473f-b71d-096c2877181d"). InnerVolumeSpecName "kube-api-access-njph5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.618537 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8" (OuterVolumeSpecName: "kube-api-access-64ls8") pod "1df2b5da-f651-420f-b497-67bfab58faad" (UID: "1df2b5da-f651-420f-b497-67bfab58faad"). InnerVolumeSpecName "kube-api-access-64ls8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.702398 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1df2b5da-f651-420f-b497-67bfab58faad-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.702437 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njph5\" (UniqueName: \"kubernetes.io/projected/20fead8d-5003-473f-b71d-096c2877181d-kube-api-access-njph5\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.702448 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64ls8\" (UniqueName: \"kubernetes.io/projected/1df2b5da-f651-420f-b497-67bfab58faad-kube-api-access-64ls8\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:49 crc kubenswrapper[4902]: I1202 14:34:49.702457 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20fead8d-5003-473f-b71d-096c2877181d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.036294 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-kwmmj" event={"ID":"1df2b5da-f651-420f-b497-67bfab58faad","Type":"ContainerDied","Data":"3ab7a4525a002a6889ae7c5d98326a87a9fbd01950f6310825fa3ece8884fdc4"} Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.036541 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ab7a4525a002a6889ae7c5d98326a87a9fbd01950f6310825fa3ece8884fdc4" Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.036329 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-kwmmj" Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.038543 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerStarted","Data":"377c4f65bd35a79aa651a1830b4cf49c3b1b0d04cfa5b27d66ca2ffe601db757"} Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.040038 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-832f-account-create-update-8ns89" event={"ID":"20fead8d-5003-473f-b71d-096c2877181d","Type":"ContainerDied","Data":"1b732f599683455e6508749672e2e1edad58a321c59e16d4e42041fb289d3f45"} Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.040084 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b732f599683455e6508749672e2e1edad58a321c59e16d4e42041fb289d3f45" Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.040137 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-832f-account-create-update-8ns89" Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.041905 4902 generic.go:334] "Generic (PLEG): container finished" podID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerID="3e655273fe43d2b060992df0598b8087baceb35e6ab8d009013b8020e6022672" exitCode=0 Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.041979 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerDied","Data":"3e655273fe43d2b060992df0598b8087baceb35e6ab8d009013b8020e6022672"} Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.043409 4902 generic.go:334] "Generic (PLEG): container finished" podID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerID="3033e15d7080893bfc76fe29831d27b48eb5732d47a6cbcd386f840438b6121c" exitCode=0 Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.043456 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerDied","Data":"3033e15d7080893bfc76fe29831d27b48eb5732d47a6cbcd386f840438b6121c"} Dec 02 14:34:50 crc kubenswrapper[4902]: I1202 14:34:50.875005 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.052294 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerStarted","Data":"7f8066199d6c9ba8091ba87e8a8bc33ffd01c44c33e11cf954e0fcc96793aae4"} Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.052523 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.054382 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerStarted","Data":"ca53563e8c7d21f932c5be2e48a675fdbfe3f262affc86b096bc5bd76d899a60"} Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.054686 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.086733 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.950672537 podStartE2EDuration="53.086714024s" podCreationTimestamp="2025-12-02 14:33:58 +0000 UTC" firstStartedPulling="2025-12-02 14:34:01.005884384 +0000 UTC m=+1072.197193093" lastFinishedPulling="2025-12-02 14:34:16.141925871 +0000 UTC m=+1087.333234580" observedRunningTime="2025-12-02 14:34:51.080992164 +0000 UTC m=+1122.272300893" watchObservedRunningTime="2025-12-02 14:34:51.086714024 +0000 UTC m=+1122.278022733" Dec 02 14:34:51 crc kubenswrapper[4902]: I1202 14:34:51.110953 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.010920037 podStartE2EDuration="53.110931379s" podCreationTimestamp="2025-12-02 14:33:58 +0000 UTC" firstStartedPulling="2025-12-02 14:34:00.951759574 +0000 UTC m=+1072.143068273" lastFinishedPulling="2025-12-02 14:34:16.051770886 +0000 UTC m=+1087.243079615" observedRunningTime="2025-12-02 14:34:51.109020926 +0000 UTC m=+1122.300329655" watchObservedRunningTime="2025-12-02 14:34:51.110931379 +0000 UTC 
m=+1122.302240088" Dec 02 14:34:52 crc kubenswrapper[4902]: I1202 14:34:52.266874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:34:52 crc kubenswrapper[4902]: E1202 14:34:52.267360 4902 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 14:34:52 crc kubenswrapper[4902]: E1202 14:34:52.267377 4902 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 14:34:52 crc kubenswrapper[4902]: E1202 14:34:52.267426 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift podName:0a523405-44a7-49cc-ae19-25ebbdbc8d73 nodeName:}" failed. No retries permitted until 2025-12-02 14:35:08.267408166 +0000 UTC m=+1139.458716875 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift") pod "swift-storage-0" (UID: "0a523405-44a7-49cc-ae19-25ebbdbc8d73") : configmap "swift-ring-files" not found Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.037872 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5mrvt"] Dec 02 14:34:53 crc kubenswrapper[4902]: E1202 14:34:53.038251 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fead8d-5003-473f-b71d-096c2877181d" containerName="mariadb-account-create-update" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.038262 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fead8d-5003-473f-b71d-096c2877181d" containerName="mariadb-account-create-update" Dec 02 14:34:53 crc kubenswrapper[4902]: E1202 14:34:53.038281 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df2b5da-f651-420f-b497-67bfab58faad" containerName="mariadb-database-create" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.038287 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df2b5da-f651-420f-b497-67bfab58faad" containerName="mariadb-database-create" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.038452 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df2b5da-f651-420f-b497-67bfab58faad" containerName="mariadb-database-create" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.038501 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fead8d-5003-473f-b71d-096c2877181d" containerName="mariadb-account-create-update" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.039058 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.056624 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5mrvt"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.128469 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f70d-account-create-update-8qqq4"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.129517 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.131537 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.135504 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f70d-account-create-update-8qqq4"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.182189 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-895dr\" (UniqueName: \"kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.182326 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.284337 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.285395 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.284469 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-895dr\" (UniqueName: \"kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.285604 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.285652 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgdqg\" (UniqueName: \"kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.295543 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-x6s4c"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.296660 4902 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.315309 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-x6s4c"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.326840 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-895dr\" (UniqueName: \"kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr\") pod \"keystone-db-create-5mrvt\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.360789 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.387079 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.387325 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgdqg\" (UniqueName: \"kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.387860 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.403001 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgdqg\" (UniqueName: \"kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg\") pod \"keystone-f70d-account-create-update-8qqq4\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.439098 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-3087-account-create-update-857qc"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.440149 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.445069 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.445310 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.454708 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3087-account-create-update-857qc"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.488950 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.489172 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqqcd\" (UniqueName: \"kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.591047 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.591155 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqqcd\" (UniqueName: \"kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.591188 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq96d\" (UniqueName: \"kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.591228 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.591833 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.607213 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqqcd\" (UniqueName: \"kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd\") pod \"placement-db-create-x6s4c\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc 
kubenswrapper[4902]: I1202 14:34:53.613551 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.637604 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-fxwsg"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.638900 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.661242 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-fxwsg"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.692550 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq96d\" (UniqueName: \"kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.692613 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.693231 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.713785 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq96d\" (UniqueName: \"kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d\") pod \"placement-3087-account-create-update-857qc\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.740926 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5e37-account-create-update-hqmwb"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.742513 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.746269 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.758188 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e37-account-create-update-hqmwb"] Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.764928 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.794336 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xf74\" (UniqueName: \"kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.794525 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.897502 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56m7r\" (UniqueName: \"kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.897913 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xf74\" (UniqueName: \"kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.898500 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.898754 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.899856 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.934116 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xf74\" (UniqueName: \"kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74\") pod \"glance-db-create-fxwsg\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:53 crc kubenswrapper[4902]: I1202 14:34:53.973625 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.001427 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56m7r\" (UniqueName: \"kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.001552 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.003010 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.022026 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56m7r\" (UniqueName: \"kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r\") pod \"glance-5e37-account-create-update-hqmwb\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.071236 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:54 crc kubenswrapper[4902]: I1202 14:34:54.977897 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3087-account-create-update-857qc"] Dec 02 14:34:54 crc kubenswrapper[4902]: W1202 14:34:54.985061 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod778f3550_079c_40ca_bb05_8e2652592b93.slice/crio-bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0 WatchSource:0}: Error finding container bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0: Status 404 returned error can't find the container with id bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0 Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.067734 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5mrvt"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.076709 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-x6s4c"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.082728 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f70d-account-create-update-8qqq4"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.122104 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f70d-account-create-update-8qqq4" event={"ID":"c79841e5-8f30-49ec-ac9f-3be1fc08db4e","Type":"ContainerStarted","Data":"646819193b9d58667452a35cb25b892f4e033dd4d60d71f380926c91fb09a2f3"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.122235 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6s4c" event={"ID":"9048046c-7133-4c81-b7ae-a37062e07f2d","Type":"ContainerStarted","Data":"945cb2412672b85ec6b2341fab1bc70746c2630fd011db248139b2d5aa3b38cb"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.124447 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerStarted","Data":"741b7773fa54e6bb38c4cc403804a3a6e0413880a7993b0f261958613b7ee5a0"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.127280 4902 generic.go:334] "Generic (PLEG): container finished" podID="3f809093-c39f-40fe-a785-69a1edd2bdf9" containerID="ec45adfceb7bf6c0df8aa0c1e7b0ccde5cc75a98df28a3e432b0590a8f3b564f" exitCode=0 Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.127401 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-4x7nc" event={"ID":"3f809093-c39f-40fe-a785-69a1edd2bdf9","Type":"ContainerDied","Data":"ec45adfceb7bf6c0df8aa0c1e7b0ccde5cc75a98df28a3e432b0590a8f3b564f"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.128995 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3087-account-create-update-857qc" event={"ID":"778f3550-079c-40ca-bb05-8e2652592b93","Type":"ContainerStarted","Data":"bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.135620 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5mrvt" event={"ID":"28512196-23fd-4d08-838b-fa37482cb529","Type":"ContainerStarted","Data":"5632734490bc7a0255aef39145de2c077b6672c314acf75c885dc3015ca0bf1e"} Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.161982 4902 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=12.680384355 podStartE2EDuration="50.161964304s" podCreationTimestamp="2025-12-02 14:34:05 +0000 UTC" firstStartedPulling="2025-12-02 14:34:17.020860454 +0000 UTC m=+1088.212169163" lastFinishedPulling="2025-12-02 14:34:54.502440403 +0000 UTC m=+1125.693749112" observedRunningTime="2025-12-02 14:34:55.151373418 +0000 UTC m=+1126.342682137" watchObservedRunningTime="2025-12-02 14:34:55.161964304 +0000 UTC m=+1126.353273013" Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.212960 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-fxwsg"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.248788 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5e37-account-create-update-hqmwb"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.555765 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.610981 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.611212 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="dnsmasq-dns" containerID="cri-o://1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f" gracePeriod=10 Dec 02 14:34:55 crc kubenswrapper[4902]: I1202 14:34:55.613324 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:55 crc kubenswrapper[4902]: E1202 14:34:55.674183 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28512196_23fd_4d08_838b_fa37482cb529.slice/crio-c9b77d35f8e607502d1ce5219844571e35718ee59b530d64130e4528ba240dec.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28512196_23fd_4d08_838b_fa37482cb529.slice/crio-conmon-c9b77d35f8e607502d1ce5219844571e35718ee59b530d64130e4528ba240dec.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9048046c_7133_4c81_b7ae_a37062e07f2d.slice/crio-06b40d9e5bd3eaa58b90fa70c39882119c3ff48e0300d41e3da9ae046219736f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f338b3f_0275_42a1_be04_016bd343525c.slice/crio-1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9048046c_7133_4c81_b7ae_a37062e07f2d.slice/crio-conmon-06b40d9e5bd3eaa58b90fa70c39882119c3ff48e0300d41e3da9ae046219736f.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.053950 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.136790 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhzjp\" (UniqueName: \"kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp\") pod \"5f338b3f-0275-42a1-be04-016bd343525c\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.137042 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc\") pod \"5f338b3f-0275-42a1-be04-016bd343525c\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.137215 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb\") pod \"5f338b3f-0275-42a1-be04-016bd343525c\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.137351 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config\") pod \"5f338b3f-0275-42a1-be04-016bd343525c\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.137895 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb\") pod \"5f338b3f-0275-42a1-be04-016bd343525c\" (UID: \"5f338b3f-0275-42a1-be04-016bd343525c\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.147025 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp" (OuterVolumeSpecName: "kube-api-access-zhzjp") pod "5f338b3f-0275-42a1-be04-016bd343525c" (UID: "5f338b3f-0275-42a1-be04-016bd343525c"). InnerVolumeSpecName "kube-api-access-zhzjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.148129 4902 generic.go:334] "Generic (PLEG): container finished" podID="7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" containerID="9361276f6eeb53ae0225d4413033c14c1bcbcce210e8b6ca89d1ad06d8dc1326" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.148187 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fxwsg" event={"ID":"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8","Type":"ContainerDied","Data":"9361276f6eeb53ae0225d4413033c14c1bcbcce210e8b6ca89d1ad06d8dc1326"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.148211 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fxwsg" event={"ID":"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8","Type":"ContainerStarted","Data":"f77037447f2444d4f6f92be346a3be7c5268bf9e62d18d3c68b6cb319154277d"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.149937 4902 generic.go:334] "Generic (PLEG): container finished" podID="54991fb3-bb13-413e-a8f1-bf85d25ed2dd" containerID="624cc007993e50fb3bddc9f6e14d3412b61dbec28908d37372affac18f028677" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.149987 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e37-account-create-update-hqmwb" event={"ID":"54991fb3-bb13-413e-a8f1-bf85d25ed2dd","Type":"ContainerDied","Data":"624cc007993e50fb3bddc9f6e14d3412b61dbec28908d37372affac18f028677"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.150007 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e37-account-create-update-hqmwb" event={"ID":"54991fb3-bb13-413e-a8f1-bf85d25ed2dd","Type":"ContainerStarted","Data":"f521da5bc96c60de7936a94379f76ed4a5bcfc0996a4308ff250dc49f416349d"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.151591 4902 generic.go:334] "Generic (PLEG): container finished" podID="778f3550-079c-40ca-bb05-8e2652592b93" containerID="bef0336660e71ddff7d4548e11bc2d01e8096e924f34ba996f6b785e8b646e7a" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.151643 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3087-account-create-update-857qc" event={"ID":"778f3550-079c-40ca-bb05-8e2652592b93","Type":"ContainerDied","Data":"bef0336660e71ddff7d4548e11bc2d01e8096e924f34ba996f6b785e8b646e7a"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.152978 4902 generic.go:334] "Generic (PLEG): container finished" podID="28512196-23fd-4d08-838b-fa37482cb529" containerID="c9b77d35f8e607502d1ce5219844571e35718ee59b530d64130e4528ba240dec" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.153032 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5mrvt" event={"ID":"28512196-23fd-4d08-838b-fa37482cb529","Type":"ContainerDied","Data":"c9b77d35f8e607502d1ce5219844571e35718ee59b530d64130e4528ba240dec"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.158925 4902 generic.go:334] "Generic (PLEG): container finished" podID="c79841e5-8f30-49ec-ac9f-3be1fc08db4e" containerID="d2740e149fd66ee0ac278ba7a94ea65f4bda9a6e1e2edcc2769605cd196c2ba5" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.158997 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f70d-account-create-update-8qqq4" 
event={"ID":"c79841e5-8f30-49ec-ac9f-3be1fc08db4e","Type":"ContainerDied","Data":"d2740e149fd66ee0ac278ba7a94ea65f4bda9a6e1e2edcc2769605cd196c2ba5"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.161831 4902 generic.go:334] "Generic (PLEG): container finished" podID="9048046c-7133-4c81-b7ae-a37062e07f2d" containerID="06b40d9e5bd3eaa58b90fa70c39882119c3ff48e0300d41e3da9ae046219736f" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.161974 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6s4c" event={"ID":"9048046c-7133-4c81-b7ae-a37062e07f2d","Type":"ContainerDied","Data":"06b40d9e5bd3eaa58b90fa70c39882119c3ff48e0300d41e3da9ae046219736f"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.163862 4902 generic.go:334] "Generic (PLEG): container finished" podID="5f338b3f-0275-42a1-be04-016bd343525c" containerID="1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f" exitCode=0 Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.164970 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.165193 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" event={"ID":"5f338b3f-0275-42a1-be04-016bd343525c","Type":"ContainerDied","Data":"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.165385 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" event={"ID":"5f338b3f-0275-42a1-be04-016bd343525c","Type":"ContainerDied","Data":"f30cc23cf2f05323816c6009ddb0cc916a90f6bf4beedbb038bf549668ddb8f8"} Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.166978 4902 scope.go:117] "RemoveContainer" containerID="1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.190204 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5f338b3f-0275-42a1-be04-016bd343525c" (UID: "5f338b3f-0275-42a1-be04-016bd343525c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.196789 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config" (OuterVolumeSpecName: "config") pod "5f338b3f-0275-42a1-be04-016bd343525c" (UID: "5f338b3f-0275-42a1-be04-016bd343525c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.210140 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5f338b3f-0275-42a1-be04-016bd343525c" (UID: "5f338b3f-0275-42a1-be04-016bd343525c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.213602 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5f338b3f-0275-42a1-be04-016bd343525c" (UID: "5f338b3f-0275-42a1-be04-016bd343525c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.221366 4902 scope.go:117] "RemoveContainer" containerID="7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.240625 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhzjp\" (UniqueName: \"kubernetes.io/projected/5f338b3f-0275-42a1-be04-016bd343525c-kube-api-access-zhzjp\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.240660 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.240675 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.240686 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.240697 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f338b3f-0275-42a1-be04-016bd343525c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.252286 4902 scope.go:117] "RemoveContainer" containerID="1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f" Dec 02 14:34:56 crc kubenswrapper[4902]: E1202 14:34:56.255133 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f\": container with ID starting with 1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f not found: ID does not exist" containerID="1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.255175 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f"} err="failed to get container status \"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f\": rpc error: code = NotFound desc = could not find container \"1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f\": container with ID starting with 1a97c385d9240d977ec98d155f287c25b1eff237d41e4faea9b1ebd3290f555f not found: ID does not exist" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.255205 4902 scope.go:117] "RemoveContainer" containerID="7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85" Dec 02 14:34:56 crc kubenswrapper[4902]: E1202 14:34:56.255596 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85\": container with ID starting with 7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85 not found: ID does not exist" containerID="7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.255635 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85"} err="failed to get container status \"7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85\": rpc error: code = NotFound desc = could not find container \"7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85\": container with ID starting with 7d189b4b21cf02a8f542c21af5cf6ef732d2e164b4e4f9bae97f207caccc0e85 not found: ID does not exist" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.521268 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.539540 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.547786 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-ppngm"] Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.609205 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647092 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647211 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swtsm\" (UniqueName: \"kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647255 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647305 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647462 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647521 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.647591 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift\") pod \"3f809093-c39f-40fe-a785-69a1edd2bdf9\" (UID: \"3f809093-c39f-40fe-a785-69a1edd2bdf9\") " Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.648386 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.649188 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.654952 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm" (OuterVolumeSpecName: "kube-api-access-swtsm") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "kube-api-access-swtsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.668293 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.672898 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts" (OuterVolumeSpecName: "scripts") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.679690 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.687133 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3f809093-c39f-40fe-a785-69a1edd2bdf9" (UID: "3f809093-c39f-40fe-a785-69a1edd2bdf9"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749302 4902 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749349 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749364 4902 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3f809093-c39f-40fe-a785-69a1edd2bdf9-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749375 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749386 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swtsm\" (UniqueName: \"kubernetes.io/projected/3f809093-c39f-40fe-a785-69a1edd2bdf9-kube-api-access-swtsm\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749417 4902 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3f809093-c39f-40fe-a785-69a1edd2bdf9-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:56 crc kubenswrapper[4902]: I1202 14:34:56.749428 4902 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3f809093-c39f-40fe-a785-69a1edd2bdf9-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.121534 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f338b3f-0275-42a1-be04-016bd343525c" path="/var/lib/kubelet/pods/5f338b3f-0275-42a1-be04-016bd343525c/volumes" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.175245 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-4x7nc" event={"ID":"3f809093-c39f-40fe-a785-69a1edd2bdf9","Type":"ContainerDied","Data":"344b24eef3cee9f002f40414da597ef3ff1068133601421207c71ef9f017d5fe"} Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.175685 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="344b24eef3cee9f002f40414da597ef3ff1068133601421207c71ef9f017d5fe" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.175278 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4x7nc" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.616244 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.665222 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts\") pod \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.665421 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgdqg\" (UniqueName: \"kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg\") pod \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\" (UID: \"c79841e5-8f30-49ec-ac9f-3be1fc08db4e\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.665679 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c79841e5-8f30-49ec-ac9f-3be1fc08db4e" (UID: "c79841e5-8f30-49ec-ac9f-3be1fc08db4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.666095 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.671500 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg" (OuterVolumeSpecName: "kube-api-access-qgdqg") pod "c79841e5-8f30-49ec-ac9f-3be1fc08db4e" (UID: "c79841e5-8f30-49ec-ac9f-3be1fc08db4e"). InnerVolumeSpecName "kube-api-access-qgdqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.720055 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.733189 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.744082 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.759080 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.767028 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts\") pod \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.767198 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xf74\" (UniqueName: \"kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74\") pod \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\" (UID: \"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.767456 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" (UID: "7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.769210 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgdqg\" (UniqueName: \"kubernetes.io/projected/c79841e5-8f30-49ec-ac9f-3be1fc08db4e-kube-api-access-qgdqg\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.769233 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.770319 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74" (OuterVolumeSpecName: "kube-api-access-9xf74") pod "7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" (UID: "7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8"). InnerVolumeSpecName "kube-api-access-9xf74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.772078 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.869779 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqqcd\" (UniqueName: \"kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd\") pod \"9048046c-7133-4c81-b7ae-a37062e07f2d\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.869857 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts\") pod \"9048046c-7133-4c81-b7ae-a37062e07f2d\" (UID: \"9048046c-7133-4c81-b7ae-a37062e07f2d\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.869891 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts\") pod \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870000 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts\") pod \"28512196-23fd-4d08-838b-fa37482cb529\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870034 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-895dr\" (UniqueName: \"kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr\") pod \"28512196-23fd-4d08-838b-fa37482cb529\" (UID: \"28512196-23fd-4d08-838b-fa37482cb529\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870056 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts\") pod \"778f3550-079c-40ca-bb05-8e2652592b93\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870127 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56m7r\" (UniqueName: \"kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r\") pod \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\" (UID: \"54991fb3-bb13-413e-a8f1-bf85d25ed2dd\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870169 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq96d\" (UniqueName: \"kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d\") pod \"778f3550-079c-40ca-bb05-8e2652592b93\" (UID: \"778f3550-079c-40ca-bb05-8e2652592b93\") " Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.870593 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xf74\" (UniqueName: \"kubernetes.io/projected/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8-kube-api-access-9xf74\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.872335 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"28512196-23fd-4d08-838b-fa37482cb529" (UID: "28512196-23fd-4d08-838b-fa37482cb529"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.873352 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "778f3550-079c-40ca-bb05-8e2652592b93" (UID: "778f3550-079c-40ca-bb05-8e2652592b93"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.873431 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9048046c-7133-4c81-b7ae-a37062e07f2d" (UID: "9048046c-7133-4c81-b7ae-a37062e07f2d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.873464 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "54991fb3-bb13-413e-a8f1-bf85d25ed2dd" (UID: "54991fb3-bb13-413e-a8f1-bf85d25ed2dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.873992 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d" (OuterVolumeSpecName: "kube-api-access-cq96d") pod "778f3550-079c-40ca-bb05-8e2652592b93" (UID: "778f3550-079c-40ca-bb05-8e2652592b93"). InnerVolumeSpecName "kube-api-access-cq96d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.875827 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd" (OuterVolumeSpecName: "kube-api-access-xqqcd") pod "9048046c-7133-4c81-b7ae-a37062e07f2d" (UID: "9048046c-7133-4c81-b7ae-a37062e07f2d"). InnerVolumeSpecName "kube-api-access-xqqcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.888492 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr" (OuterVolumeSpecName: "kube-api-access-895dr") pod "28512196-23fd-4d08-838b-fa37482cb529" (UID: "28512196-23fd-4d08-838b-fa37482cb529"). InnerVolumeSpecName "kube-api-access-895dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.888731 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r" (OuterVolumeSpecName: "kube-api-access-56m7r") pod "54991fb3-bb13-413e-a8f1-bf85d25ed2dd" (UID: "54991fb3-bb13-413e-a8f1-bf85d25ed2dd"). InnerVolumeSpecName "kube-api-access-56m7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972487 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28512196-23fd-4d08-838b-fa37482cb529-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972525 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-895dr\" (UniqueName: \"kubernetes.io/projected/28512196-23fd-4d08-838b-fa37482cb529-kube-api-access-895dr\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972538 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3550-079c-40ca-bb05-8e2652592b93-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972548 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56m7r\" (UniqueName: \"kubernetes.io/projected/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-kube-api-access-56m7r\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972557 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq96d\" (UniqueName: \"kubernetes.io/projected/778f3550-079c-40ca-bb05-8e2652592b93-kube-api-access-cq96d\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972582 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqqcd\" (UniqueName: \"kubernetes.io/projected/9048046c-7133-4c81-b7ae-a37062e07f2d-kube-api-access-xqqcd\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972590 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9048046c-7133-4c81-b7ae-a37062e07f2d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:57 crc kubenswrapper[4902]: I1202 14:34:57.972599 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54991fb3-bb13-413e-a8f1-bf85d25ed2dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.188837 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3087-account-create-update-857qc" event={"ID":"778f3550-079c-40ca-bb05-8e2652592b93","Type":"ContainerDied","Data":"bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.188885 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb803bc29a74b722429c2f1c645f5b4712ff29104314af9e5406365262f30df0" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.188909 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3087-account-create-update-857qc" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.191059 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5mrvt" event={"ID":"28512196-23fd-4d08-838b-fa37482cb529","Type":"ContainerDied","Data":"5632734490bc7a0255aef39145de2c077b6672c314acf75c885dc3015ca0bf1e"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.191071 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5mrvt" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.191085 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5632734490bc7a0255aef39145de2c077b6672c314acf75c885dc3015ca0bf1e" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.193111 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f70d-account-create-update-8qqq4" event={"ID":"c79841e5-8f30-49ec-ac9f-3be1fc08db4e","Type":"ContainerDied","Data":"646819193b9d58667452a35cb25b892f4e033dd4d60d71f380926c91fb09a2f3"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.193132 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="646819193b9d58667452a35cb25b892f4e033dd4d60d71f380926c91fb09a2f3" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.193155 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f70d-account-create-update-8qqq4" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.194676 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-x6s4c" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.194673 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-x6s4c" event={"ID":"9048046c-7133-4c81-b7ae-a37062e07f2d","Type":"ContainerDied","Data":"945cb2412672b85ec6b2341fab1bc70746c2630fd011db248139b2d5aa3b38cb"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.194831 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="945cb2412672b85ec6b2341fab1bc70746c2630fd011db248139b2d5aa3b38cb" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.196450 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-fxwsg" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.196470 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fxwsg" event={"ID":"7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8","Type":"ContainerDied","Data":"f77037447f2444d4f6f92be346a3be7c5268bf9e62d18d3c68b6cb319154277d"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.196498 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f77037447f2444d4f6f92be346a3be7c5268bf9e62d18d3c68b6cb319154277d" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.198280 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5e37-account-create-update-hqmwb" event={"ID":"54991fb3-bb13-413e-a8f1-bf85d25ed2dd","Type":"ContainerDied","Data":"f521da5bc96c60de7936a94379f76ed4a5bcfc0996a4308ff250dc49f416349d"} Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.198304 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f521da5bc96c60de7936a94379f76ed4a5bcfc0996a4308ff250dc49f416349d" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.198357 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5e37-account-create-update-hqmwb" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.514252 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.526410 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dkmxc" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.526448 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-67clb" podUID="51ddf240-5cda-4f89-831f-4a20ce9997ed" containerName="ovn-controller" probeResult="failure" output=< Dec 02 14:34:58 crc kubenswrapper[4902]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 02 14:34:58 crc kubenswrapper[4902]: > Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.735655 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-67clb-config-q8fph"] Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.735978 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9048046c-7133-4c81-b7ae-a37062e07f2d" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.735994 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9048046c-7133-4c81-b7ae-a37062e07f2d" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736008 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28512196-23fd-4d08-838b-fa37482cb529" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736014 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="28512196-23fd-4d08-838b-fa37482cb529" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736025 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79841e5-8f30-49ec-ac9f-3be1fc08db4e" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736031 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79841e5-8f30-49ec-ac9f-3be1fc08db4e" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736043 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736048 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736055 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f809093-c39f-40fe-a785-69a1edd2bdf9" containerName="swift-ring-rebalance" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736061 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f809093-c39f-40fe-a785-69a1edd2bdf9" containerName="swift-ring-rebalance" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736073 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="init" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736079 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="init" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736090 
4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="dnsmasq-dns" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736097 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="dnsmasq-dns" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736111 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54991fb3-bb13-413e-a8f1-bf85d25ed2dd" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736118 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="54991fb3-bb13-413e-a8f1-bf85d25ed2dd" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: E1202 14:34:58.736128 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778f3550-079c-40ca-bb05-8e2652592b93" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736134 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="778f3550-079c-40ca-bb05-8e2652592b93" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736269 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f809093-c39f-40fe-a785-69a1edd2bdf9" containerName="swift-ring-rebalance" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736292 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="54991fb3-bb13-413e-a8f1-bf85d25ed2dd" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736308 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c79841e5-8f30-49ec-ac9f-3be1fc08db4e" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736323 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="28512196-23fd-4d08-838b-fa37482cb529" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736336 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="778f3550-079c-40ca-bb05-8e2652592b93" containerName="mariadb-account-create-update" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736347 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736359 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="dnsmasq-dns" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736369 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9048046c-7133-4c81-b7ae-a37062e07f2d" containerName="mariadb-database-create" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.736874 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.739796 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.758228 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67clb-config-q8fph"] Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.795951 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.796040 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.796079 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.796122 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.796197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.796223 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qr9n\" (UniqueName: \"kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898151 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898202 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qr9n\" (UniqueName: 
\"kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898270 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898316 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898338 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.898370 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.899133 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.899383 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.899699 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.900687 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.901267 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:58 crc kubenswrapper[4902]: I1202 14:34:58.921516 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qr9n\" (UniqueName: \"kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n\") pod \"ovn-controller-67clb-config-q8fph\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:59 crc kubenswrapper[4902]: I1202 14:34:59.128396 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:34:59 crc kubenswrapper[4902]: I1202 14:34:59.627753 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67clb-config-q8fph"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.225389 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67clb-config-q8fph" event={"ID":"9c939c80-5892-4220-8565-2efe304cb44b","Type":"ContainerStarted","Data":"ee207fc82dd5cd23b5d3f234d7a1114e6bfdb2d448eaadb7f8f6b4158b2f6e12"} Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.225721 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67clb-config-q8fph" event={"ID":"9c939c80-5892-4220-8565-2efe304cb44b","Type":"ContainerStarted","Data":"0827dce39d73ad961cbc36c175f55a0f2ba028a3c37d6996f3ab0b0a5e946c61"} Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.245762 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-67clb-config-q8fph" podStartSLOduration=2.245740112 podStartE2EDuration="2.245740112s" podCreationTimestamp="2025-12-02 14:34:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:00.243052187 +0000 UTC m=+1131.434360906" watchObservedRunningTime="2025-12-02 14:35:00.245740112 +0000 UTC m=+1131.437048831" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.357837 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.494799 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.665668 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-jrhfj"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.666812 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.680586 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-jrhfj"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.735650 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.735984 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qml69\" (UniqueName: \"kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.739849 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7jf2x"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.740831 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.753164 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7jf2x"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.767268 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6886-account-create-update-dmh6g"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.768407 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.773616 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.789981 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6886-account-create-update-dmh6g"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.800527 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-jb52r"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.801936 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.808679 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-2w9hl" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.810876 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.825123 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-jb52r"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.826975 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-ppngm" podUID="5f338b3f-0275-42a1-be04-016bd343525c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.838651 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.838692 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts\") pod \"barbican-6886-account-create-update-dmh6g\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.838737 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qml69\" (UniqueName: \"kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.838761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.838777 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nqr2\" (UniqueName: \"kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.839010 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp7sp\" (UniqueName: \"kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.839116 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.839162 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6h9d\" (UniqueName: \"kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d\") pod \"barbican-6886-account-create-update-dmh6g\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.839194 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.839391 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.840036 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.866611 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qml69\" (UniqueName: \"kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69\") pod \"cinder-db-create-jrhfj\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.873690 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-b438-account-create-update-9wj9w"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.874842 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.877228 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.886392 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b438-account-create-update-9wj9w"] Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940282 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940317 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts\") pod \"barbican-6886-account-create-update-dmh6g\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940342 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hngt4\" (UniqueName: \"kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940370 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940394 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nqr2\" (UniqueName: \"kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940410 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940455 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp7sp\" (UniqueName: \"kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940484 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6h9d\" (UniqueName: \"kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d\") pod 
\"barbican-6886-account-create-update-dmh6g\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940500 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.940580 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.941157 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.941339 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts\") pod \"barbican-6886-account-create-update-dmh6g\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.943768 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.943897 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.950736 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.961299 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp7sp\" (UniqueName: \"kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp\") pod \"barbican-db-create-7jf2x\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.965934 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6h9d\" (UniqueName: \"kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d\") pod \"barbican-6886-account-create-update-dmh6g\" (UID: 
\"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.970233 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nqr2\" (UniqueName: \"kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2\") pod \"watcher-db-sync-jb52r\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:00 crc kubenswrapper[4902]: I1202 14:35:00.987097 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.042023 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hngt4\" (UniqueName: \"kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.042073 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.043077 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.053311 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-0019-account-create-update-l7vfs"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.053628 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.054435 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.057672 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.072106 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hngt4\" (UniqueName: \"kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4\") pod \"cinder-b438-account-create-update-9wj9w\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.081147 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-gs4g4"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.082248 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.084839 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.129388 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.143661 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmn2k\" (UniqueName: \"kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.143727 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.143767 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b545p\" (UniqueName: \"kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.143875 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.176858 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-gs4g4"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.176901 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-0019-account-create-update-l7vfs"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.219954 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.244414 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.244503 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmn2k\" (UniqueName: \"kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.244534 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.244654 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b545p\" (UniqueName: \"kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.246025 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.249279 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.270864 4902 generic.go:334] "Generic (PLEG): container finished" podID="9c939c80-5892-4220-8565-2efe304cb44b" containerID="ee207fc82dd5cd23b5d3f234d7a1114e6bfdb2d448eaadb7f8f6b4158b2f6e12" exitCode=0 Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.271006 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67clb-config-q8fph" event={"ID":"9c939c80-5892-4220-8565-2efe304cb44b","Type":"ContainerDied","Data":"ee207fc82dd5cd23b5d3f234d7a1114e6bfdb2d448eaadb7f8f6b4158b2f6e12"} Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.273812 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmn2k\" (UniqueName: \"kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k\") pod \"neutron-0019-account-create-update-l7vfs\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.277548 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b545p\" (UniqueName: \"kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p\") pod \"neutron-db-create-gs4g4\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.399859 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.438744 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.584270 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-jrhfj"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.664486 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7jf2x"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.728444 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-jb52r"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.739629 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6886-account-create-update-dmh6g"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.883744 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b438-account-create-update-9wj9w"] Dec 02 14:35:01 crc kubenswrapper[4902]: I1202 14:35:01.963421 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-0019-account-create-update-l7vfs"] Dec 02 14:35:01 crc kubenswrapper[4902]: W1202 14:35:01.976197 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33f979ec_84eb_43b4_813a_32c7506b868f.slice/crio-ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4 WatchSource:0}: Error finding container ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4: Status 404 returned error can't find the container with id ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4 Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.038015 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-gs4g4"] Dec 02 14:35:02 crc kubenswrapper[4902]: W1202 14:35:02.051604 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d07765f_4bd5_46d0_9955_ccbd681f556b.slice/crio-218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea WatchSource:0}: Error finding container 218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea: Status 404 returned error can't find the container with id 218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea Dec 02 14:35:02 crc kubenswrapper[4902]: E1202 14:35:02.214627 4902 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:41512->38.102.83.251:38083: write tcp 38.102.83.251:41512->38.102.83.251:38083: write: broken pipe Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.327255 4902 generic.go:334] "Generic (PLEG): container finished" podID="987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" containerID="97168a240ab2094d40f6493794122d8c35cd1946080123b0a9c00989470034a3" exitCode=0 Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.327340 4902 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/barbican-db-create-7jf2x" event={"ID":"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1","Type":"ContainerDied","Data":"97168a240ab2094d40f6493794122d8c35cd1946080123b0a9c00989470034a3"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.327366 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jf2x" event={"ID":"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1","Type":"ContainerStarted","Data":"4f1b1b6268d8887c73f3c76f9da93459c5b3c0d9892d5a0f6371c8c04efc7e1f"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.330488 4902 generic.go:334] "Generic (PLEG): container finished" podID="7f1c4d14-f9ad-4470-8165-d9c6375be0bc" containerID="8eed2de031f89db2dc948219ba6b135869c1d9b719e7f56959140bc2587b9908" exitCode=0 Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.330630 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jrhfj" event={"ID":"7f1c4d14-f9ad-4470-8165-d9c6375be0bc","Type":"ContainerDied","Data":"8eed2de031f89db2dc948219ba6b135869c1d9b719e7f56959140bc2587b9908"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.330675 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jrhfj" event={"ID":"7f1c4d14-f9ad-4470-8165-d9c6375be0bc","Type":"ContainerStarted","Data":"e10afee9999cbc1b64c910f6b3251c1edeac88c333b0c09350a47f09f6210bef"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.332197 4902 generic.go:334] "Generic (PLEG): container finished" podID="6a94c2aa-4287-434c-bf10-99b25a39ce73" containerID="5c0ad0817cdf4b7b4cba6c31348a5bba5fe353b0e2796b6e6ecea5a87d5bed41" exitCode=0 Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.332252 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6886-account-create-update-dmh6g" event={"ID":"6a94c2aa-4287-434c-bf10-99b25a39ce73","Type":"ContainerDied","Data":"5c0ad0817cdf4b7b4cba6c31348a5bba5fe353b0e2796b6e6ecea5a87d5bed41"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.332278 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6886-account-create-update-dmh6g" event={"ID":"6a94c2aa-4287-434c-bf10-99b25a39ce73","Type":"ContainerStarted","Data":"194308fb3362b56bfde42bbebf182fd41cb2239bc1b05dfb4ef83c20a4979848"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.335119 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b438-account-create-update-9wj9w" event={"ID":"a752ac02-1cb9-4927-b81a-d466abb1d58e","Type":"ContainerStarted","Data":"86beb17541ea64955ebbd7d106dac218ea8d61a37f8d441e7bdde82c0d929a1b"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.335370 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b438-account-create-update-9wj9w" event={"ID":"a752ac02-1cb9-4927-b81a-d466abb1d58e","Type":"ContainerStarted","Data":"98a52d66b3b1cd39f8ca4e52ae2078de03eb6313c69ca76a8b67945290c0c2b5"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.344723 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-gs4g4" event={"ID":"2d07765f-4bd5-46d0-9955-ccbd681f556b","Type":"ContainerStarted","Data":"218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.349011 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-jb52r" 
event={"ID":"f0782122-af51-4295-bfc8-fb4a0388cdd0","Type":"ContainerStarted","Data":"f31c546ed51c674c746b316bb09de01a4f5e161d1b5e15c1c11d6694a914cef2"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.360442 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-b438-account-create-update-9wj9w" podStartSLOduration=2.360419951 podStartE2EDuration="2.360419951s" podCreationTimestamp="2025-12-02 14:35:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:02.357994224 +0000 UTC m=+1133.549302933" watchObservedRunningTime="2025-12-02 14:35:02.360419951 +0000 UTC m=+1133.551728660" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.366209 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0019-account-create-update-l7vfs" event={"ID":"33f979ec-84eb-43b4-813a-32c7506b868f","Type":"ContainerStarted","Data":"f304fde205272911e2bfa0eca611196be5b17575507fe9c70eeb9caed6dc9216"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.366248 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0019-account-create-update-l7vfs" event={"ID":"33f979ec-84eb-43b4-813a-32c7506b868f","Type":"ContainerStarted","Data":"ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4"} Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.408615 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-0019-account-create-update-l7vfs" podStartSLOduration=1.408597225 podStartE2EDuration="1.408597225s" podCreationTimestamp="2025-12-02 14:35:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:02.406017793 +0000 UTC m=+1133.597326502" watchObservedRunningTime="2025-12-02 14:35:02.408597225 +0000 UTC m=+1133.599905934" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.734339 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.790849 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.790920 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qr9n\" (UniqueName: \"kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.790969 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.791061 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.791178 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.791216 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts\") pod \"9c939c80-5892-4220-8565-2efe304cb44b\" (UID: \"9c939c80-5892-4220-8565-2efe304cb44b\") " Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.792899 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.793410 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run" (OuterVolumeSpecName: "var-run") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.793453 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.796011 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.796514 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts" (OuterVolumeSpecName: "scripts") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.798277 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n" (OuterVolumeSpecName: "kube-api-access-8qr9n") pod "9c939c80-5892-4220-8565-2efe304cb44b" (UID: "9c939c80-5892-4220-8565-2efe304cb44b"). InnerVolumeSpecName "kube-api-access-8qr9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894018 4902 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894058 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qr9n\" (UniqueName: \"kubernetes.io/projected/9c939c80-5892-4220-8565-2efe304cb44b-kube-api-access-8qr9n\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894072 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894084 4902 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-run\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894095 4902 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c939c80-5892-4220-8565-2efe304cb44b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:02 crc kubenswrapper[4902]: I1202 14:35:02.894105 4902 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c939c80-5892-4220-8565-2efe304cb44b-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.378421 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-67clb-config-q8fph"] Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.381692 4902 generic.go:334] "Generic (PLEG): container finished" podID="33f979ec-84eb-43b4-813a-32c7506b868f" containerID="f304fde205272911e2bfa0eca611196be5b17575507fe9c70eeb9caed6dc9216" exitCode=0 Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.381755 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-0019-account-create-update-l7vfs" event={"ID":"33f979ec-84eb-43b4-813a-32c7506b868f","Type":"ContainerDied","Data":"f304fde205272911e2bfa0eca611196be5b17575507fe9c70eeb9caed6dc9216"} Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.385597 4902 generic.go:334] "Generic (PLEG): container finished" podID="a752ac02-1cb9-4927-b81a-d466abb1d58e" containerID="86beb17541ea64955ebbd7d106dac218ea8d61a37f8d441e7bdde82c0d929a1b" exitCode=0 Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.385656 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b438-account-create-update-9wj9w" event={"ID":"a752ac02-1cb9-4927-b81a-d466abb1d58e","Type":"ContainerDied","Data":"86beb17541ea64955ebbd7d106dac218ea8d61a37f8d441e7bdde82c0d929a1b"} Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.392586 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-67clb-config-q8fph"] Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.397390 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0827dce39d73ad961cbc36c175f55a0f2ba028a3c37d6996f3ab0b0a5e946c61" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.397468 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67clb-config-q8fph" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.400952 4902 generic.go:334] "Generic (PLEG): container finished" podID="2d07765f-4bd5-46d0-9955-ccbd681f556b" containerID="697664da42902613f1144f376242de74a5779eaa479fa28ae9191d5f64159457" exitCode=0 Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.401111 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-gs4g4" event={"ID":"2d07765f-4bd5-46d0-9955-ccbd681f556b","Type":"ContainerDied","Data":"697664da42902613f1144f376242de74a5779eaa479fa28ae9191d5f64159457"} Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.581160 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-67clb" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.734700 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-8nfb5"] Dec 02 14:35:03 crc kubenswrapper[4902]: E1202 14:35:03.734984 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c939c80-5892-4220-8565-2efe304cb44b" containerName="ovn-config" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.734996 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c939c80-5892-4220-8565-2efe304cb44b" containerName="ovn-config" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.735166 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c939c80-5892-4220-8565-2efe304cb44b" containerName="ovn-config" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.735694 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.748854 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.748866 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5shcs" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.749073 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.749246 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.785059 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-8nfb5"] Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.930538 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkg5t\" (UniqueName: \"kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.930670 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.930821 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.969159 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-8gzqw"] Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.971601 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.973891 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.974391 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-64b6w" Dec 02 14:35:03 crc kubenswrapper[4902]: I1202 14:35:03.982898 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8gzqw"] Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.032417 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.032463 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkg5t\" (UniqueName: \"kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.032532 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.037841 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.038342 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.052371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkg5t\" (UniqueName: \"kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t\") pod \"keystone-db-sync-8nfb5\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") " pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.069153 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-8nfb5" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.081319 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.103100 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.108003 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.134552 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.134632 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.134659 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.134715 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236212 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp7sp\" (UniqueName: \"kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp\") pod \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236278 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts\") pod \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236348 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6h9d\" (UniqueName: \"kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d\") pod \"6a94c2aa-4287-434c-bf10-99b25a39ce73\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236375 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts\") pod \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\" (UID: \"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236449 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts\") pod \"6a94c2aa-4287-434c-bf10-99b25a39ce73\" (UID: \"6a94c2aa-4287-434c-bf10-99b25a39ce73\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236506 4902 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qml69\" (UniqueName: \"kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69\") pod \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\" (UID: \"7f1c4d14-f9ad-4470-8165-d9c6375be0bc\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236763 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236819 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236841 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.236899 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.238584 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6a94c2aa-4287-434c-bf10-99b25a39ce73" (UID: "6a94c2aa-4287-434c-bf10-99b25a39ce73"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.238602 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" (UID: "987ec49c-d8be-4d61-ba4a-7f5ee3016cf1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.238636 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7f1c4d14-f9ad-4470-8165-d9c6375be0bc" (UID: "7f1c4d14-f9ad-4470-8165-d9c6375be0bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.241105 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69" (OuterVolumeSpecName: "kube-api-access-qml69") pod "7f1c4d14-f9ad-4470-8165-d9c6375be0bc" (UID: "7f1c4d14-f9ad-4470-8165-d9c6375be0bc"). 
InnerVolumeSpecName "kube-api-access-qml69". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.241744 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d" (OuterVolumeSpecName: "kube-api-access-d6h9d") pod "6a94c2aa-4287-434c-bf10-99b25a39ce73" (UID: "6a94c2aa-4287-434c-bf10-99b25a39ce73"). InnerVolumeSpecName "kube-api-access-d6h9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.242094 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.242587 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.242725 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.244179 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp" (OuterVolumeSpecName: "kube-api-access-hp7sp") pod "987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" (UID: "987ec49c-d8be-4d61-ba4a-7f5ee3016cf1"). InnerVolumeSpecName "kube-api-access-hp7sp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.254657 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8\") pod \"glance-db-sync-8gzqw\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338345 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qml69\" (UniqueName: \"kubernetes.io/projected/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-kube-api-access-qml69\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338381 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp7sp\" (UniqueName: \"kubernetes.io/projected/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-kube-api-access-hp7sp\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338396 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f1c4d14-f9ad-4470-8165-d9c6375be0bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338410 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6h9d\" (UniqueName: \"kubernetes.io/projected/6a94c2aa-4287-434c-bf10-99b25a39ce73-kube-api-access-d6h9d\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338422 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.338432 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a94c2aa-4287-434c-bf10-99b25a39ce73-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.398357 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8gzqw" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.411181 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jrhfj" event={"ID":"7f1c4d14-f9ad-4470-8165-d9c6375be0bc","Type":"ContainerDied","Data":"e10afee9999cbc1b64c910f6b3251c1edeac88c333b0c09350a47f09f6210bef"} Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.411230 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e10afee9999cbc1b64c910f6b3251c1edeac88c333b0c09350a47f09f6210bef" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.411289 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-jrhfj" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.412749 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6886-account-create-update-dmh6g" event={"ID":"6a94c2aa-4287-434c-bf10-99b25a39ce73","Type":"ContainerDied","Data":"194308fb3362b56bfde42bbebf182fd41cb2239bc1b05dfb4ef83c20a4979848"} Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.412788 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="194308fb3362b56bfde42bbebf182fd41cb2239bc1b05dfb4ef83c20a4979848" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.412844 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6886-account-create-update-dmh6g" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.418494 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7jf2x" event={"ID":"987ec49c-d8be-4d61-ba4a-7f5ee3016cf1","Type":"ContainerDied","Data":"4f1b1b6268d8887c73f3c76f9da93459c5b3c0d9892d5a0f6371c8c04efc7e1f"} Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.418549 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f1b1b6268d8887c73f3c76f9da93459c5b3c0d9892d5a0f6371c8c04efc7e1f" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.418931 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7jf2x" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.568840 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-8nfb5"] Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.759889 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.956454 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmn2k\" (UniqueName: \"kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k\") pod \"33f979ec-84eb-43b4-813a-32c7506b868f\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.956933 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts\") pod \"33f979ec-84eb-43b4-813a-32c7506b868f\" (UID: \"33f979ec-84eb-43b4-813a-32c7506b868f\") " Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.957586 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33f979ec-84eb-43b4-813a-32c7506b868f" (UID: "33f979ec-84eb-43b4-813a-32c7506b868f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:04 crc kubenswrapper[4902]: I1202 14:35:04.967079 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k" (OuterVolumeSpecName: "kube-api-access-fmn2k") pod "33f979ec-84eb-43b4-813a-32c7506b868f" (UID: "33f979ec-84eb-43b4-813a-32c7506b868f"). InnerVolumeSpecName "kube-api-access-fmn2k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.031679 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.036473 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.058842 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f979ec-84eb-43b4-813a-32c7506b868f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.058906 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmn2k\" (UniqueName: \"kubernetes.io/projected/33f979ec-84eb-43b4-813a-32c7506b868f-kube-api-access-fmn2k\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.130854 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c939c80-5892-4220-8565-2efe304cb44b" path="/var/lib/kubelet/pods/9c939c80-5892-4220-8565-2efe304cb44b/volumes" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.146579 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8gzqw"] Dec 02 14:35:05 crc kubenswrapper[4902]: W1202 14:35:05.151277 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7ca0ff1_b6ca_496c_b02e_71cc900b4433.slice/crio-c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4 WatchSource:0}: Error finding container c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4: Status 404 returned error can't find the container with id c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4 Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.159760 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts\") pod \"2d07765f-4bd5-46d0-9955-ccbd681f556b\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.159825 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b545p\" (UniqueName: \"kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p\") pod \"2d07765f-4bd5-46d0-9955-ccbd681f556b\" (UID: \"2d07765f-4bd5-46d0-9955-ccbd681f556b\") " Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.159923 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hngt4\" (UniqueName: \"kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4\") pod \"a752ac02-1cb9-4927-b81a-d466abb1d58e\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.160298 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d07765f-4bd5-46d0-9955-ccbd681f556b" (UID: "2d07765f-4bd5-46d0-9955-ccbd681f556b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.160446 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts\") pod \"a752ac02-1cb9-4927-b81a-d466abb1d58e\" (UID: \"a752ac02-1cb9-4927-b81a-d466abb1d58e\") " Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.161010 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a752ac02-1cb9-4927-b81a-d466abb1d58e" (UID: "a752ac02-1cb9-4927-b81a-d466abb1d58e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.161358 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d07765f-4bd5-46d0-9955-ccbd681f556b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.161384 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a752ac02-1cb9-4927-b81a-d466abb1d58e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.164026 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p" (OuterVolumeSpecName: "kube-api-access-b545p") pod "2d07765f-4bd5-46d0-9955-ccbd681f556b" (UID: "2d07765f-4bd5-46d0-9955-ccbd681f556b"). InnerVolumeSpecName "kube-api-access-b545p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.167043 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4" (OuterVolumeSpecName: "kube-api-access-hngt4") pod "a752ac02-1cb9-4927-b81a-d466abb1d58e" (UID: "a752ac02-1cb9-4927-b81a-d466abb1d58e"). InnerVolumeSpecName "kube-api-access-hngt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.262264 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b545p\" (UniqueName: \"kubernetes.io/projected/2d07765f-4bd5-46d0-9955-ccbd681f556b-kube-api-access-b545p\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.262302 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hngt4\" (UniqueName: \"kubernetes.io/projected/a752ac02-1cb9-4927-b81a-d466abb1d58e-kube-api-access-hngt4\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.429030 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-gs4g4" event={"ID":"2d07765f-4bd5-46d0-9955-ccbd681f556b","Type":"ContainerDied","Data":"218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea"} Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.429074 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="218bf141c57bc41d0fc512e79f0c752636ae1b1467aec97671ed7fa144776dea" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.429200 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-gs4g4" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.430608 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-0019-account-create-update-l7vfs" event={"ID":"33f979ec-84eb-43b4-813a-32c7506b868f","Type":"ContainerDied","Data":"ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4"} Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.430651 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-0019-account-create-update-l7vfs" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.430650 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceba28e27048aa9a371f25821f72e10f4b8807caedd800a6a4d4653c72138fa4" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.433842 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8gzqw" event={"ID":"a7ca0ff1-b6ca-496c-b02e-71cc900b4433","Type":"ContainerStarted","Data":"c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4"} Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.435858 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b438-account-create-update-9wj9w" event={"ID":"a752ac02-1cb9-4927-b81a-d466abb1d58e","Type":"ContainerDied","Data":"98a52d66b3b1cd39f8ca4e52ae2078de03eb6313c69ca76a8b67945290c0c2b5"} Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.435905 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98a52d66b3b1cd39f8ca4e52ae2078de03eb6313c69ca76a8b67945290c0c2b5" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.435917 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b438-account-create-update-9wj9w" Dec 02 14:35:05 crc kubenswrapper[4902]: I1202 14:35:05.437557 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-8nfb5" event={"ID":"90848399-1e3c-448f-a1c1-8dd64a608fdb","Type":"ContainerStarted","Data":"a3b1c4bcfd9b7b56054c1c007b8e59475da3ebed82328af071aeb1b127644b2b"} Dec 02 14:35:06 crc kubenswrapper[4902]: I1202 14:35:06.609727 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:06 crc kubenswrapper[4902]: I1202 14:35:06.614642 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:07 crc kubenswrapper[4902]: I1202 14:35:07.467850 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:08 crc kubenswrapper[4902]: I1202 14:35:08.323034 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:35:08 crc kubenswrapper[4902]: I1202 14:35:08.347033 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0a523405-44a7-49cc-ae19-25ebbdbc8d73-etc-swift\") pod \"swift-storage-0\" (UID: \"0a523405-44a7-49cc-ae19-25ebbdbc8d73\") " pod="openstack/swift-storage-0" Dec 02 14:35:08 crc kubenswrapper[4902]: I1202 14:35:08.415640 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 02 14:35:09 crc kubenswrapper[4902]: I1202 14:35:09.681912 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 14:35:09 crc kubenswrapper[4902]: I1202 14:35:09.683362 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus" containerID="cri-o://85eeb6d07896470dc279bd1a847bb07e307765bbb655f25a796af1cf61b990be" gracePeriod=600 Dec 02 14:35:09 crc kubenswrapper[4902]: I1202 14:35:09.683510 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="config-reloader" containerID="cri-o://377c4f65bd35a79aa651a1830b4cf49c3b1b0d04cfa5b27d66ca2ffe601db757" gracePeriod=600 Dec 02 14:35:09 crc kubenswrapper[4902]: I1202 14:35:09.683466 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="thanos-sidecar" containerID="cri-o://741b7773fa54e6bb38c4cc403804a3a6e0413880a7993b0f261958613b7ee5a0" gracePeriod=600 Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.502184 4902 generic.go:334] "Generic (PLEG): container finished" podID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerID="741b7773fa54e6bb38c4cc403804a3a6e0413880a7993b0f261958613b7ee5a0" exitCode=0 Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.503174 4902 generic.go:334] "Generic (PLEG): container finished" podID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerID="377c4f65bd35a79aa651a1830b4cf49c3b1b0d04cfa5b27d66ca2ffe601db757" exitCode=0 Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.503211 4902 generic.go:334] "Generic (PLEG): container finished" podID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerID="85eeb6d07896470dc279bd1a847bb07e307765bbb655f25a796af1cf61b990be" exitCode=0 Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.503249 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerDied","Data":"741b7773fa54e6bb38c4cc403804a3a6e0413880a7993b0f261958613b7ee5a0"} Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.503279 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerDied","Data":"377c4f65bd35a79aa651a1830b4cf49c3b1b0d04cfa5b27d66ca2ffe601db757"} Dec 02 14:35:10 crc kubenswrapper[4902]: I1202 14:35:10.503289 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerDied","Data":"85eeb6d07896470dc279bd1a847bb07e307765bbb655f25a796af1cf61b990be"} Dec 02 14:35:11 crc kubenswrapper[4902]: I1202 14:35:11.610219 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.111:9090/-/ready\": dial tcp 10.217.0.111:9090: connect: connection refused" Dec 02 14:35:16 crc kubenswrapper[4902]: I1202 14:35:16.609896 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" 
podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.111:9090/-/ready\": dial tcp 10.217.0.111:9090: connect: connection refused" Dec 02 14:35:20 crc kubenswrapper[4902]: E1202 14:35:20.214904 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Dec 02 14:35:20 crc kubenswrapper[4902]: E1202 14:35:20.215558 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dkg5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-8nfb5_openstack(90848399-1e3c-448f-a1c1-8dd64a608fdb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:35:20 crc kubenswrapper[4902]: E1202 14:35:20.216944 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/keystone-db-sync-8nfb5" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" Dec 02 14:35:20 crc kubenswrapper[4902]: E1202 14:35:20.597764 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="openstack/keystone-db-sync-8nfb5" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" Dec 02 14:35:24 crc kubenswrapper[4902]: I1202 14:35:24.610968 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus" 
probeResult="failure" output="Get \"http://10.217.0.111:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:35:24 crc kubenswrapper[4902]: I1202 14:35:24.611921 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:26 crc kubenswrapper[4902]: E1202 14:35:26.563964 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 02 14:35:26 crc kubenswrapper[4902]: E1202 14:35:26.564516 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lkhf8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-8gzqw_openstack(a7ca0ff1-b6ca-496c-b02e-71cc900b4433): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:35:26 crc kubenswrapper[4902]: E1202 14:35:26.565791 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-8gzqw" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" Dec 02 14:35:26 crc kubenswrapper[4902]: E1202 14:35:26.675401 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-8gzqw" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.162796 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.155:5001/podified-epoxy-centos9/openstack-watcher-api:watcher_latest" Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.162847 4902 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.155:5001/podified-epoxy-centos9/openstack-watcher-api:watcher_latest" Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.163013 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:watcher-db-sync,Image:38.102.83.155:5001/podified-epoxy-centos9/openstack-watcher-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/watcher/watcher.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:watcher-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7nqr2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-db-sync-jb52r_openstack(f0782122-af51-4295-bfc8-fb4a0388cdd0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.164273 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/watcher-db-sync-jb52r" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0" Dec 02 14:35:27 crc 
kubenswrapper[4902]: I1202 14:35:27.209988 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.361558 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.361657 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.361682 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.361771 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqwnr\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362257 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362546 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362619 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362667 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362691 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out\") pod \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\" (UID: \"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0\") " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.362990 4902 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.368680 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out" (OuterVolumeSpecName: "config-out") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.368732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr" (OuterVolumeSpecName: "kube-api-access-jqwnr") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "kube-api-access-jqwnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.369735 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config" (OuterVolumeSpecName: "config") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.372313 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "thanos-prometheus-http-client-file". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.372385 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.397214 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config" (OuterVolumeSpecName: "web-config") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.411783 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" (UID: "b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0"). InnerVolumeSpecName "pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465205 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465262 4902 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-config-out\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465285 4902 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465309 4902 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-web-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465330 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqwnr\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-kube-api-access-jqwnr\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465394 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") on node \"crc\" " Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.465415 4902 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.503312 4902 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.503479 4902 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145") on node "crc"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.567802 4902 reconciler_common.go:293] "Volume detached for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") on node \"crc\" DevicePath \"\""
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.681284 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0","Type":"ContainerDied","Data":"35554987d92791a21de7a4725afbe317607b3998f83a3d373a2c127c30c8e48c"}
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.681357 4902 scope.go:117] "RemoveContainer" containerID="741b7773fa54e6bb38c4cc403804a3a6e0413880a7993b0f261958613b7ee5a0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.681384 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.683890 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/podified-epoxy-centos9/openstack-watcher-api:watcher_latest\\\"\"" pod="openstack/watcher-db-sync-jb52r" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.710421 4902 scope.go:117] "RemoveContainer" containerID="377c4f65bd35a79aa651a1830b4cf49c3b1b0d04cfa5b27d66ca2ffe601db757"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.726993 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.741544 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.746758 4902 scope.go:117] "RemoveContainer" containerID="85eeb6d07896470dc279bd1a847bb07e307765bbb655f25a796af1cf61b990be"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.748194 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.801573 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802043 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="config-reloader"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802065 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="config-reloader"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802086 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="init-config-reloader"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802095 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="init-config-reloader"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802111 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a94c2aa-4287-434c-bf10-99b25a39ce73" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802119 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a94c2aa-4287-434c-bf10-99b25a39ce73" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802128 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802136 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802160 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d07765f-4bd5-46d0-9955-ccbd681f556b" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802170 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d07765f-4bd5-46d0-9955-ccbd681f556b" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802202 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f979ec-84eb-43b4-813a-32c7506b868f" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802210 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f979ec-84eb-43b4-813a-32c7506b868f" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802225 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802233 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802248 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f1c4d14-f9ad-4470-8165-d9c6375be0bc" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802256 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f1c4d14-f9ad-4470-8165-d9c6375be0bc" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802274 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="thanos-sidecar"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802281 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="thanos-sidecar"
Dec 02 14:35:27 crc kubenswrapper[4902]: E1202 14:35:27.802293 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a752ac02-1cb9-4927-b81a-d466abb1d58e" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802302 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a752ac02-1cb9-4927-b81a-d466abb1d58e" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802494 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="config-reloader"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802509 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802520 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="thanos-sidecar"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802533 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d07765f-4bd5-46d0-9955-ccbd681f556b" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802545 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a94c2aa-4287-434c-bf10-99b25a39ce73" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802576 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="33f979ec-84eb-43b4-813a-32c7506b868f" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802594 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f1c4d14-f9ad-4470-8165-d9c6375be0bc" containerName="mariadb-database-create"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802610 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a752ac02-1cb9-4927-b81a-d466abb1d58e" containerName="mariadb-account-create-update"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.802622 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.804540 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.810149 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.811107 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.811923 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.812062 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.812172 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-k2vng"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.812266 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.816349 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.816504 4902 scope.go:117] "RemoveContainer" containerID="aff94390a576c9473b54c81e832e792fd589295cff84eb9feebe60201bc82327"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.817281 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.980674 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.980737 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.980919 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981008 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981033 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981091 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981176 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981323 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981346 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w587l\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981412 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:27 crc kubenswrapper[4902]: I1202 14:35:27.981470 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.083198 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.083792 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.083921 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084052 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084179 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w587l\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084272 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084541 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084703 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084804 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084926 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.084472 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.087692 4902 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.087732 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/665a64dfccf06cca52387316c6cee605d1c10e0fb3133ba2864d18ab912b518d/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.088753 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.088871 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.089438 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.089795 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.091196 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.092164 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.094543 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.101822 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.114149 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w587l\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.122265 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.143599 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.639330 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 14:35:28 crc kubenswrapper[4902]: W1202 14:35:28.648700 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3ebb493_070e_44be_ae1e_b89464b9011f.slice/crio-5bd9ae44a2412fad3e165b3c38eb9c8a31a54af8f395802a9f137f833e7d7788 WatchSource:0}: Error finding container 5bd9ae44a2412fad3e165b3c38eb9c8a31a54af8f395802a9f137f833e7d7788: Status 404 returned error can't find the container with id 5bd9ae44a2412fad3e165b3c38eb9c8a31a54af8f395802a9f137f833e7d7788
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.694282 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"8f9d4b8e2e586e0115bd941ccdabe3b64b67d0e0b01a94915491c1cd14f44f60"}
Dec 02 14:35:28 crc kubenswrapper[4902]: I1202 14:35:28.695141 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerStarted","Data":"5bd9ae44a2412fad3e165b3c38eb9c8a31a54af8f395802a9f137f833e7d7788"}
Dec 02 14:35:29 crc kubenswrapper[4902]: I1202 14:35:29.129478 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" path="/var/lib/kubelet/pods/b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0/volumes"
Dec 02 14:35:29 crc kubenswrapper[4902]: I1202 14:35:29.610368 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="b9a60a5e-54e6-469f-b5b2-9cb6a90d0bc0" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.111:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:35:30 crc kubenswrapper[4902]: I1202 14:35:30.719195 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"77e79b1ca9a19a2875e85b831a0a13315d1139dc5086d436115f40f643129600"}
Dec 02 14:35:30 crc kubenswrapper[4902]: I1202 14:35:30.719516 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"8e55b5dcb346fb68d167650307a576e997dd88cf8a5ddfd5680399f30950c289"}
Dec 02 14:35:30 crc kubenswrapper[4902]: I1202 14:35:30.719529 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"a07950c7e2e2bbfff7a776fb1587875e674e42c223525f003a5e7ab60404e629"}
Dec 02 14:35:30 crc kubenswrapper[4902]: I1202 14:35:30.719538 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"8a91db86f1202b3a5dbd57ac9ae4794d83d82fea820d716fc88665d4e5853e74"}
Dec 02 14:35:31 crc kubenswrapper[4902]: I1202 14:35:31.729374 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerStarted","Data":"9f98c6399543b6aeb0902271cbea14acf7f3eec0bae6d280d6de5237b1a46914"}
Dec 02 14:35:32 crc kubenswrapper[4902]: I1202 14:35:32.742758 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"fa8bdf5b33c8a1d2a554a1fc8230e9846469784d467673e402357f4ef3eb6209"}
Dec 02 14:35:32 crc kubenswrapper[4902]: I1202 14:35:32.743127 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"9529740dfea6bc8f4e022b1829157cd20775d4ef5415ccbc2e611ebfb1c131e9"}
Dec 02 14:35:32 crc kubenswrapper[4902]: I1202 14:35:32.743140 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"4da2a8fb00c548c77f00aef3dbcf194032e6aae44eee65ee7594c2d9014e9e00"}
Dec 02 14:35:32 crc kubenswrapper[4902]: I1202 14:35:32.743152 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"fb1a71234cfb94fa56c894ef3092e4aaff6fbe2f912f9ff27b04b953a837e635"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:34.765820 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"b91f1764df68dfb7c46ab98da0c6af7d2e6fc603a6b6082353c45ff2013d667c"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:34.766438 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"5701d4e52b418e560944c247600724a9a5fc7913168b2e6bff0e272be3fe13a9"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:34.766452 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"669ba7a01a300370aee34713a1621ae35b60bd3d17a1dd28a364416ca795745c"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:35.783202 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-8nfb5" event={"ID":"90848399-1e3c-448f-a1c1-8dd64a608fdb","Type":"ContainerStarted","Data":"67c9aacbc2361d46791baa6754d958c3fa0c997834114937bb2c888c3eaaf2b4"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:35.798766 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"7b9dc7b1af48931d14597a44e9330fb94420b10177ce0ffe3066e6e30f20a195"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:35.798833 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"63da39406f344f23f7a26bc1847db5ad678610632feffb0a2c74a021872f774e"}
Dec 02 14:35:35 crc kubenswrapper[4902]: I1202 14:35:35.813965 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-8nfb5" podStartSLOduration=2.750206507 podStartE2EDuration="32.813936868s" podCreationTimestamp="2025-12-02 14:35:03 +0000 UTC" firstStartedPulling="2025-12-02 14:35:04.580173393 +0000 UTC m=+1135.771482112" lastFinishedPulling="2025-12-02 14:35:34.643903764 +0000 UTC m=+1165.835212473" observedRunningTime="2025-12-02 14:35:35.802744876 +0000 UTC m=+1166.994053585" watchObservedRunningTime="2025-12-02 14:35:35.813936868 +0000 UTC m=+1167.005245617"
Dec 02 14:35:36 crc kubenswrapper[4902]: I1202 14:35:36.823998 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"860997b6adf60d5d3733af687de0ab66db6215737997108cf390f4e0a7f43d58"}
Dec 02 14:35:36 crc kubenswrapper[4902]: I1202 14:35:36.824295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"0a523405-44a7-49cc-ae19-25ebbdbc8d73","Type":"ContainerStarted","Data":"ef087d9c99c67aa72d9cc261cf8a0b79643a5c0191e8d592cc578f78e0244178"}
Dec 02 14:35:36 crc kubenswrapper[4902]: I1202 14:35:36.859752 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=55.656783295 podStartE2EDuration="1m1.859737037s" podCreationTimestamp="2025-12-02 14:34:35 +0000 UTC" firstStartedPulling="2025-12-02 14:35:27.752296388 +0000 UTC m=+1158.943605107" lastFinishedPulling="2025-12-02 14:35:33.95525014 +0000 UTC m=+1165.146558849" observedRunningTime="2025-12-02 14:35:36.85840925 +0000 UTC m=+1168.049717999" watchObservedRunningTime="2025-12-02 14:35:36.859737037 +0000 UTC m=+1168.051045746"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.130929 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"]
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.132487 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.134523 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.150109 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"]
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257001 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257092 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257142 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssjhj\" (UniqueName: \"kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257311 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257692 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.257764 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359544 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359614 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359649 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359691 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359718 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.359763 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssjhj\" (UniqueName: \"kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.360775 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.360812 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.361285 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.361300 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.361323 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.382080 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssjhj\" (UniqueName: \"kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj\") pod \"dnsmasq-dns-764c5664d7-vqd9x\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.489868 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:37 crc kubenswrapper[4902]: I1202 14:35:37.934111 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"]
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.851448 4902 generic.go:334] "Generic (PLEG): container finished" podID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerID="4e5941061fe5ccdb349904d2563991dcdc32344b13f0d888b76ff9b28e499f9e" exitCode=0
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.851527 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" event={"ID":"9d95d682-63ae-429c-9ef3-aa1e4ab751bf","Type":"ContainerDied","Data":"4e5941061fe5ccdb349904d2563991dcdc32344b13f0d888b76ff9b28e499f9e"}
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.851921 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" event={"ID":"9d95d682-63ae-429c-9ef3-aa1e4ab751bf","Type":"ContainerStarted","Data":"2665d1455c9185fd4caf369fea2aebb8cbfa7c21838e0cb601a10b00fb1bdd9a"}
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.854602 4902 generic.go:334] "Generic (PLEG): container finished" podID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerID="9f98c6399543b6aeb0902271cbea14acf7f3eec0bae6d280d6de5237b1a46914" exitCode=0
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.854667 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerDied","Data":"9f98c6399543b6aeb0902271cbea14acf7f3eec0bae6d280d6de5237b1a46914"}
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.856731 4902 generic.go:334] "Generic (PLEG): container finished" podID="90848399-1e3c-448f-a1c1-8dd64a608fdb" containerID="67c9aacbc2361d46791baa6754d958c3fa0c997834114937bb2c888c3eaaf2b4" exitCode=0
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.856784 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-8nfb5" event={"ID":"90848399-1e3c-448f-a1c1-8dd64a608fdb","Type":"ContainerDied","Data":"67c9aacbc2361d46791baa6754d958c3fa0c997834114937bb2c888c3eaaf2b4"}
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.861963 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-jb52r" event={"ID":"f0782122-af51-4295-bfc8-fb4a0388cdd0","Type":"ContainerStarted","Data":"325ce819002deb3f3e6afc1ff534c1d379ad8f8ed5a3b731059a4296080f866f"}
Dec 02 14:35:38 crc kubenswrapper[4902]: I1202 14:35:38.949482 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-jb52r" podStartSLOduration=2.526338053 podStartE2EDuration="38.949453229s" podCreationTimestamp="2025-12-02 14:35:00 +0000 UTC" firstStartedPulling="2025-12-02 14:35:01.745401712 +0000 UTC m=+1132.936710421" lastFinishedPulling="2025-12-02 14:35:38.168516878 +0000 UTC m=+1169.359825597" observedRunningTime="2025-12-02 14:35:38.941279509 +0000 UTC m=+1170.132588238" watchObservedRunningTime="2025-12-02 14:35:38.949453229 +0000 UTC m=+1170.140761948"
Dec 02 14:35:39 crc kubenswrapper[4902]: I1202 14:35:39.886295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" event={"ID":"9d95d682-63ae-429c-9ef3-aa1e4ab751bf","Type":"ContainerStarted","Data":"32ed45bd5c18368fea5b684afe5f88e0802d5b93546257cf1469bba96ef70689"}
Dec 02 14:35:39 crc kubenswrapper[4902]: I1202 14:35:39.886888 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x"
Dec 02 14:35:39 crc kubenswrapper[4902]: I1202 14:35:39.890375 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerStarted","Data":"5a65e34a3f138b9b9885263c5f6096d72dd6924e2ab5b59378dd1482d06a4aad"}
Dec 02 14:35:39 crc kubenswrapper[4902]: I1202 14:35:39.925877 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" podStartSLOduration=2.925858296 podStartE2EDuration="2.925858296s" podCreationTimestamp="2025-12-02 14:35:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:39.923828099 +0000 UTC m=+1171.115136808" watchObservedRunningTime="2025-12-02 14:35:39.925858296 +0000 UTC m=+1171.117167005"
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.184550 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-8nfb5"
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.317626 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle\") pod \"90848399-1e3c-448f-a1c1-8dd64a608fdb\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") "
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.317705 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkg5t\" (UniqueName: \"kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t\") pod \"90848399-1e3c-448f-a1c1-8dd64a608fdb\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") "
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.317786 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data\") pod \"90848399-1e3c-448f-a1c1-8dd64a608fdb\" (UID: \"90848399-1e3c-448f-a1c1-8dd64a608fdb\") "
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.322946 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t" (OuterVolumeSpecName: "kube-api-access-dkg5t") pod "90848399-1e3c-448f-a1c1-8dd64a608fdb" (UID: "90848399-1e3c-448f-a1c1-8dd64a608fdb"). InnerVolumeSpecName "kube-api-access-dkg5t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.343354 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90848399-1e3c-448f-a1c1-8dd64a608fdb" (UID: "90848399-1e3c-448f-a1c1-8dd64a608fdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.380634 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data" (OuterVolumeSpecName: "config-data") pod "90848399-1e3c-448f-a1c1-8dd64a608fdb" (UID: "90848399-1e3c-448f-a1c1-8dd64a608fdb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.420511 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkg5t\" (UniqueName: \"kubernetes.io/projected/90848399-1e3c-448f-a1c1-8dd64a608fdb-kube-api-access-dkg5t\") on node \"crc\" DevicePath \"\""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.420547 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.420558 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90848399-1e3c-448f-a1c1-8dd64a608fdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.907902 4902 generic.go:334] "Generic (PLEG): container finished" podID="f0782122-af51-4295-bfc8-fb4a0388cdd0" containerID="325ce819002deb3f3e6afc1ff534c1d379ad8f8ed5a3b731059a4296080f866f" exitCode=0
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.908904 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-jb52r" event={"ID":"f0782122-af51-4295-bfc8-fb4a0388cdd0","Type":"ContainerDied","Data":"325ce819002deb3f3e6afc1ff534c1d379ad8f8ed5a3b731059a4296080f866f"}
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.912093 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8gzqw" event={"ID":"a7ca0ff1-b6ca-496c-b02e-71cc900b4433","Type":"ContainerStarted","Data":"06603acd7ccdef64ab15b6c4d400dd45df7d55e7572ab85d4c7e59e2e97f44d0"}
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.922740 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-8nfb5"
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.923249 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-8nfb5" event={"ID":"90848399-1e3c-448f-a1c1-8dd64a608fdb","Type":"ContainerDied","Data":"a3b1c4bcfd9b7b56054c1c007b8e59475da3ebed82328af071aeb1b127644b2b"}
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.926997 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3b1c4bcfd9b7b56054c1c007b8e59475da3ebed82328af071aeb1b127644b2b"
Dec 02 14:35:40 crc kubenswrapper[4902]: I1202 14:35:40.954915 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-8gzqw" podStartSLOduration=3.416149013 podStartE2EDuration="37.954894872s" podCreationTimestamp="2025-12-02 14:35:03 +0000 UTC" firstStartedPulling="2025-12-02 14:35:05.153961271 +0000 UTC m=+1136.345269980" lastFinishedPulling="2025-12-02 14:35:39.69270712 +0000 UTC m=+1170.884015839" observedRunningTime="2025-12-02 14:35:40.947272708 +0000 UTC m=+1172.138581417" watchObservedRunningTime="2025-12-02 14:35:40.954894872 +0000 UTC m=+1172.146203581"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.153448 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"]
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.188783 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-f5597"]
Dec 02 14:35:41 crc kubenswrapper[4902]: E1202 14:35:41.189194 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" containerName="keystone-db-sync"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.189208 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" containerName="keystone-db-sync"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.189417 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" containerName="keystone-db-sync"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.190168 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.193005 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.193208 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.193488 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.194350 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5shcs"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.196227 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.216658 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"]
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.218346 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.249349 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f5597"]
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.257880 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"]
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335269 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335317 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335347 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335372 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335391 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335416 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf4ff\" (UniqueName: \"kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335453 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj"
Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335483 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") "
pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335532 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335558 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335680 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4sdx\" (UniqueName: \"kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.335795 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437169 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf4ff\" (UniqueName: \"kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437258 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437312 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437372 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437399 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " 
pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437433 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4sdx\" (UniqueName: \"kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437463 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437520 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437541 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437582 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437604 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.437626 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.439057 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.439138 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: 
I1202 14:35:41.439194 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.439732 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.440031 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.460460 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.467091 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.470541 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.470734 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.471410 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.475416 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.479505 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.481042 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.492244 4902 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"horizon-horizon-dockercfg-sc47g" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.492507 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.494814 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.509459 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf4ff\" (UniqueName: \"kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff\") pod \"keystone-bootstrap-f5597\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.514698 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4sdx\" (UniqueName: \"kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx\") pod \"dnsmasq-dns-5959f8865f-hrcjj\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.517348 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.548923 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.549076 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.549113 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wql4d\" (UniqueName: \"kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.549131 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.549153 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.552029 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.556647 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.603799 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-rgnm2"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.608733 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.631089 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.631279 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-n6scg" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.631399 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.653216 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.653281 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wql4d\" (UniqueName: \"kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.653307 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.653324 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.653365 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.654301 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.655194 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data\") pod \"horizon-ff5b6d47-dgpgd\" (UID: 
\"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.661059 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.664000 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.667039 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rgnm2"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.756523 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9rsp\" (UniqueName: \"kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.757658 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.757739 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.763192 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.763236 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.763320 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.765984 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-gmq8h"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.770209 4902 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.792262 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.792504 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.792689 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-n6cc5" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.809096 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gmq8h"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.833887 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.844088 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.859018 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.859740 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864546 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864629 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864652 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2fpw\" (UniqueName: \"kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864759 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9rsp\" (UniqueName: \"kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864784 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864806 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864839 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864861 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.864881 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.865353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.916619 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wql4d\" (UniqueName: \"kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d\") pod \"horizon-ff5b6d47-dgpgd\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.938193 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9rsp\" (UniqueName: \"kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.945131 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.946185 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.952253 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc 
kubenswrapper[4902]: I1202 14:35:41.953005 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data\") pod \"cinder-db-sync-rgnm2\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") " pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.954620 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.965081 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="dnsmasq-dns" containerID="cri-o://32ed45bd5c18368fea5b684afe5f88e0802d5b93546257cf1469bba96ef70689" gracePeriod=10 Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974128 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-886wf\" (UniqueName: \"kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974207 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974292 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974312 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974346 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2fpw\" (UniqueName: \"kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974366 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974396 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974461 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974514 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.974585 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.987432 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-q9f9f"] Dec 02 14:35:41 crc kubenswrapper[4902]: I1202 14:35:41.988642 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.003962 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.005541 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.005993 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gfvx4" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.006145 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.006866 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.035705 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2fpw\" (UniqueName: \"kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw\") pod \"neutron-db-sync-gmq8h\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.047543 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.059753 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-q9f9f"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.063836 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rgnm2" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.070948 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7n2ss"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079460 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079541 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079603 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-886wf\" (UniqueName: \"kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079633 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079688 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079714 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.079736 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.080141 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.092250 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.093918 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc 
kubenswrapper[4902]: I1202 14:35:42.094419 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.094604 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.105179 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7dp8d" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.105509 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.112788 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.113003 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.119726 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.129830 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7n2ss"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.139297 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.141056 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.149411 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-886wf\" (UniqueName: \"kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf\") pod \"ceilometer-0\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.180956 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181007 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k2rq\" (UniqueName: \"kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181032 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181074 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181105 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181145 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5qgb\" (UniqueName: \"kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181167 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " 
pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181329 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181392 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181430 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181448 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sctsh\" (UniqueName: \"kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.181468 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.193420 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.205336 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.244872 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.246693 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.255634 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.260138 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283075 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283137 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283155 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sctsh\" (UniqueName: \"kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283175 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283204 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283226 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k2rq\" (UniqueName: \"kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283249 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283287 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283308 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data\") pod \"placement-db-sync-q9f9f\" (UID: 
\"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283330 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5qgb\" (UniqueName: \"kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283350 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283378 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.283405 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.285501 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.287543 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.287837 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.288961 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.289381 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.290191 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.305351 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.305494 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.305582 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.306018 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.310019 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k2rq\" (UniqueName: \"kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq\") pod \"placement-db-sync-q9f9f\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.310078 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5qgb\" (UniqueName: \"kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb\") pod \"barbican-db-sync-7n2ss\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") " pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.313026 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sctsh\" (UniqueName: \"kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh\") pod \"horizon-6cdbd8859c-g8rmc\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.368026 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-q9f9f" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.385974 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.386093 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.386149 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.386257 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.386348 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.386413 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl8ql\" (UniqueName: \"kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.447593 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7n2ss" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.488757 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.488883 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.488917 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl8ql\" (UniqueName: \"kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.489044 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.489086 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.489132 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.490477 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.494274 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.494271 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.494549 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.494674 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.500772 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.540275 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl8ql\" (UniqueName: \"kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql\") pod \"dnsmasq-dns-58dd9ff6bc-pprmb\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.573039 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.889938 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.977673 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f5597"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.990331 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"] Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.990949 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-jb52r" event={"ID":"f0782122-af51-4295-bfc8-fb4a0388cdd0","Type":"ContainerDied","Data":"f31c546ed51c674c746b316bb09de01a4f5e161d1b5e15c1c11d6694a914cef2"} Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.991019 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f31c546ed51c674c746b316bb09de01a4f5e161d1b5e15c1c11d6694a914cef2" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.991094 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-jb52r" Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.999400 4902 generic.go:334] "Generic (PLEG): container finished" podID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerID="32ed45bd5c18368fea5b684afe5f88e0802d5b93546257cf1469bba96ef70689" exitCode=0 Dec 02 14:35:42 crc kubenswrapper[4902]: I1202 14:35:42.999533 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" event={"ID":"9d95d682-63ae-429c-9ef3-aa1e4ab751bf","Type":"ContainerDied","Data":"32ed45bd5c18368fea5b684afe5f88e0802d5b93546257cf1469bba96ef70689"} Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.006909 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle\") pod \"f0782122-af51-4295-bfc8-fb4a0388cdd0\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.007158 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nqr2\" (UniqueName: \"kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2\") pod \"f0782122-af51-4295-bfc8-fb4a0388cdd0\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.007365 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data\") pod \"f0782122-af51-4295-bfc8-fb4a0388cdd0\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.007507 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data\") pod \"f0782122-af51-4295-bfc8-fb4a0388cdd0\" (UID: \"f0782122-af51-4295-bfc8-fb4a0388cdd0\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.019758 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f0782122-af51-4295-bfc8-fb4a0388cdd0" (UID: "f0782122-af51-4295-bfc8-fb4a0388cdd0"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.021471 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2" (OuterVolumeSpecName: "kube-api-access-7nqr2") pod "f0782122-af51-4295-bfc8-fb4a0388cdd0" (UID: "f0782122-af51-4295-bfc8-fb4a0388cdd0"). InnerVolumeSpecName "kube-api-access-7nqr2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.022287 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerStarted","Data":"dd3df0b598e7c1cd68317da2c80ba119932a2975a8d3359e8e0f97b19960a398"} Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.022333 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerStarted","Data":"777275b52e88fccb21ce0d89ccb2d4aca2b28a9de09e42ae43f817e4c0ea1d5b"} Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.088082 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=16.088063089 podStartE2EDuration="16.088063089s" podCreationTimestamp="2025-12-02 14:35:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:43.082242875 +0000 UTC m=+1174.273551574" watchObservedRunningTime="2025-12-02 14:35:43.088063089 +0000 UTC m=+1174.279371798" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.095868 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0782122-af51-4295-bfc8-fb4a0388cdd0" (UID: "f0782122-af51-4295-bfc8-fb4a0388cdd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.137293 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.137721 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nqr2\" (UniqueName: \"kubernetes.io/projected/f0782122-af51-4295-bfc8-fb4a0388cdd0-kube-api-access-7nqr2\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.138985 4902 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.146888 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data" (OuterVolumeSpecName: "config-data") pod "f0782122-af51-4295-bfc8-fb4a0388cdd0" (UID: "f0782122-af51-4295-bfc8-fb4a0388cdd0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.157642 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.157687 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.167698 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.184702 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.240196 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0782122-af51-4295-bfc8-fb4a0388cdd0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304233 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: E1202 14:35:43.304625 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0" containerName="watcher-db-sync" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304638 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0" containerName="watcher-db-sync" Dec 02 14:35:43 crc kubenswrapper[4902]: E1202 14:35:43.304655 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="init" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304661 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="init" Dec 02 14:35:43 crc kubenswrapper[4902]: E1202 14:35:43.304672 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="dnsmasq-dns" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304677 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="dnsmasq-dns" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304851 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0" containerName="watcher-db-sync" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.304869 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" containerName="dnsmasq-dns" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.305470 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.310184 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-2w9hl" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.310247 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.334204 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348000 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348124 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssjhj\" (UniqueName: \"kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348143 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348173 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348196 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.348254 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.362618 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj" (OuterVolumeSpecName: "kube-api-access-ssjhj") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "kube-api-access-ssjhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.384285 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.395421 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.408058 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.427234 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.440721 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.442625 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.445649 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.451299 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvr6g\" (UniqueName: \"kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.451357 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.451404 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.451422 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.451466 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssjhj\" (UniqueName: \"kubernetes.io/projected/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-kube-api-access-ssjhj\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.510082 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.566713 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwvmf\" (UniqueName: \"kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.566874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvr6g\" (UniqueName: \"kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g\") pod 
\"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.566980 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4p59\" (UniqueName: \"kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567018 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567086 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567159 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567204 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567234 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567265 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567319 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567351 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc 
kubenswrapper[4902]: I1202 14:35:43.567384 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567421 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.567509 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.569740 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rgnm2"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.580616 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.590212 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.590599 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.602296 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.615226 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.615795 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvr6g\" (UniqueName: \"kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g\") pod \"watcher-applier-0\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.620407 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-q9f9f"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.628758 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-gmq8h"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.640256 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 
14:35:43.664082 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678170 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwvmf\" (UniqueName: \"kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678246 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4p59\" (UniqueName: \"kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678280 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678305 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678329 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678359 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678381 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678402 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678421 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.678455 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.680336 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.683230 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.683693 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.683729 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: W1202 14:35:43.685818 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod318a2e77_a615_4af8_bade_d574dd90ca91.slice/crio-2e1248a5ee03bf1b25bce71082f71bc5798b816fab91c488069ae1e3fdf01797 WatchSource:0}: Error finding container 2e1248a5ee03bf1b25bce71082f71bc5798b816fab91c488069ae1e3fdf01797: Status 404 returned error can't find the container with id 2e1248a5ee03bf1b25bce71082f71bc5798b816fab91c488069ae1e3fdf01797 Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.687289 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.688761 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.688966 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.705113 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4p59\" (UniqueName: \"kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc 
kubenswrapper[4902]: I1202 14:35:43.705833 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.707511 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.726015 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7n2ss"] Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.727846 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwvmf\" (UniqueName: \"kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf\") pod \"watcher-api-0\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: W1202 14:35:43.733481 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4666a144_4ec6_43f9_b3d9_74d0bef2e9a9.slice/crio-ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca WatchSource:0}: Error finding container ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca: Status 404 returned error can't find the container with id ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.739246 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.782900 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.808384 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: W1202 14:35:43.825990 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode30c107c_9639_41d6_a4de_283d44a0c7de.slice/crio-aaa1e77e16692125940d457ce4a7f54bca0927cb991dec030def8873e870e889 WatchSource:0}: Error finding container aaa1e77e16692125940d457ce4a7f54bca0927cb991dec030def8873e870e889: Status 404 returned error can't find the container with id aaa1e77e16692125940d457ce4a7f54bca0927cb991dec030def8873e870e889 Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.851646 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config" (OuterVolumeSpecName: "config") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.898404 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.898701 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") pod \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\" (UID: \"9d95d682-63ae-429c-9ef3-aa1e4ab751bf\") " Dec 02 14:35:43 crc kubenswrapper[4902]: W1202 14:35:43.899151 4902 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/9d95d682-63ae-429c-9ef3-aa1e4ab751bf/volumes/kubernetes.io~configmap/dns-swift-storage-0 Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.899165 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.899805 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.899819 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.899831 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.899858 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:43 crc kubenswrapper[4902]: I1202 14:35:43.910817 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9d95d682-63ae-429c-9ef3-aa1e4ab751bf" (UID: "9d95d682-63ae-429c-9ef3-aa1e4ab751bf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.002271 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d95d682-63ae-429c-9ef3-aa1e4ab751bf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.021412 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.094412 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.094432 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.127520 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" event={"ID":"e30c107c-9639-41d6-a4de-283d44a0c7de","Type":"ContainerStarted","Data":"aaa1e77e16692125940d457ce4a7f54bca0927cb991dec030def8873e870e889"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.133484 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gmq8h" event={"ID":"b7c64325-9e55-496c-b643-2e053ee69bdb","Type":"ContainerStarted","Data":"c25701d6c369d37488f1f95ca2935cea75573491e0aabf49d54b954936f9fcfc"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.142958 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.148710 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.166148 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerStarted","Data":"2e1248a5ee03bf1b25bce71082f71bc5798b816fab91c488069ae1e3fdf01797"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.201094 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q9f9f" event={"ID":"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9","Type":"ContainerStarted","Data":"ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.214083 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerStarted","Data":"5a2c3ba3e052ad7136111c6e6c7a22e4b076d6e8a151313fb5b7f477f25ae2c6"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.240198 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.247925 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" event={"ID":"9d95d682-63ae-429c-9ef3-aa1e4ab751bf","Type":"ContainerDied","Data":"2665d1455c9185fd4caf369fea2aebb8cbfa7c21838e0cb601a10b00fb1bdd9a"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.247987 4902 scope.go:117] "RemoveContainer" containerID="32ed45bd5c18368fea5b684afe5f88e0802d5b93546257cf1469bba96ef70689" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.248169 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-vqd9x" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.279407 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.288199 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerStarted","Data":"ccf94bc9240894c8012923fb286496cde7de96ee0685062f838780c6d8cb97e8"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.289616 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" event={"ID":"49f4dba9-30ea-4a41-a1e8-e88182dbb669","Type":"ContainerStarted","Data":"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.289731 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" event={"ID":"49f4dba9-30ea-4a41-a1e8-e88182dbb669","Type":"ContainerStarted","Data":"03877d97f0b840699066aa06e3ffb7dcf3f84adb7924965de083f734352fdba1"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.294847 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5597" event={"ID":"9de2acde-b30a-4bb5-afdb-a56374e82d10","Type":"ContainerStarted","Data":"8b6b40627f0d46cc07acf520db3c4d594a7a2b05ef5e8024f4d7fdd316861455"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.295107 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5597" event={"ID":"9de2acde-b30a-4bb5-afdb-a56374e82d10","Type":"ContainerStarted","Data":"f94e598e79dc5a06a1090a1564ac3585fbf945dbfd2398bfcd2991c0cd0a5527"} Dec 02 14:35:44 
crc kubenswrapper[4902]: I1202 14:35:44.318429 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.318510 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chvbv\" (UniqueName: \"kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.318731 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.319165 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.319245 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.331834 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rgnm2" event={"ID":"52b3cd40-3726-4b52-8e64-8b15f5f02a99","Type":"ContainerStarted","Data":"5e30f8ba1c6660dbd143db2d874a9e1f9d253a0a0e857a141a652c049535cd49"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.369688 4902 scope.go:117] "RemoveContainer" containerID="4e5941061fe5ccdb349904d2563991dcdc32344b13f0d888b76ff9b28e499f9e" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.370131 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7n2ss" event={"ID":"9c39a4fe-20e0-458b-a226-d6f5ad9cb846","Type":"ContainerStarted","Data":"ed49014e35c9637e1292929765f60308de670f1a4fbee43f35b8baf912a51c6a"} Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.442074 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.442328 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chvbv\" (UniqueName: \"kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc 
kubenswrapper[4902]: I1202 14:35:44.442582 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.442851 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.442992 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.444747 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.446482 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.447308 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-f5597" podStartSLOduration=3.44728958 podStartE2EDuration="3.44728958s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:44.442093894 +0000 UTC m=+1175.633402603" watchObservedRunningTime="2025-12-02 14:35:44.44728958 +0000 UTC m=+1175.638598309" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.448674 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.459070 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.468442 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.474285 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chvbv\" (UniqueName: 
\"kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv\") pod \"horizon-6fc8fd9965-7vwnt\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.577717 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.592360 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-vqd9x"] Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.639388 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.956712 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:44 crc kubenswrapper[4902]: I1202 14:35:44.960847 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.040760 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.075945 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4sdx\" (UniqueName: \"kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.076278 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.076400 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.076463 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.076545 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.076604 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0\") pod \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\" (UID: \"49f4dba9-30ea-4a41-a1e8-e88182dbb669\") " Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.106875 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx" 
(OuterVolumeSpecName: "kube-api-access-n4sdx") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "kube-api-access-n4sdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.108475 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.149587 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.164514 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d95d682-63ae-429c-9ef3-aa1e4ab751bf" path="/var/lib/kubelet/pods/9d95d682-63ae-429c-9ef3-aa1e4ab751bf/volumes" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.167908 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config" (OuterVolumeSpecName: "config") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.179440 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.179461 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4sdx\" (UniqueName: \"kubernetes.io/projected/49f4dba9-30ea-4a41-a1e8-e88182dbb669-kube-api-access-n4sdx\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.179470 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.179481 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.202709 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.206954 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "49f4dba9-30ea-4a41-a1e8-e88182dbb669" (UID: "49f4dba9-30ea-4a41-a1e8-e88182dbb669"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.282156 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.282182 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/49f4dba9-30ea-4a41-a1e8-e88182dbb669-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.296715 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:35:45 crc kubenswrapper[4902]: W1202 14:35:45.333831 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04345ae3_20db_4b4d_bf7d_37daf1f39660.slice/crio-98bf6b62faff88b80b6be999fe63f58317aee854ab2d58d8ebca65b2a7d9fc3f WatchSource:0}: Error finding container 98bf6b62faff88b80b6be999fe63f58317aee854ab2d58d8ebca65b2a7d9fc3f: Status 404 returned error can't find the container with id 98bf6b62faff88b80b6be999fe63f58317aee854ab2d58d8ebca65b2a7d9fc3f Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.394707 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"04345ae3-20db-4b4d-bf7d-37daf1f39660","Type":"ContainerStarted","Data":"98bf6b62faff88b80b6be999fe63f58317aee854ab2d58d8ebca65b2a7d9fc3f"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.410775 4902 generic.go:334] "Generic (PLEG): container finished" podID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerID="989fbc95ce9a434b18c838996717c1b69cbbd232c02991715a484a5410716285" exitCode=0 Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.410858 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" event={"ID":"e30c107c-9639-41d6-a4de-283d44a0c7de","Type":"ContainerDied","Data":"989fbc95ce9a434b18c838996717c1b69cbbd232c02991715a484a5410716285"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.419030 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gmq8h" event={"ID":"b7c64325-9e55-496c-b643-2e053ee69bdb","Type":"ContainerStarted","Data":"354b6351255c643eebd7bf96c2d062773e0d2a686363e51f7b340b618afd4664"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.438535 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"66b2b80e-c31b-4574-a218-26165964598a","Type":"ContainerStarted","Data":"4babe4e537278e0c4b196841a400e2ea5f67113835056149df808460ac3589e3"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.458012 4902 generic.go:334] "Generic (PLEG): container finished" podID="49f4dba9-30ea-4a41-a1e8-e88182dbb669" containerID="8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d" exitCode=0 Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.458095 4902 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" event={"ID":"49f4dba9-30ea-4a41-a1e8-e88182dbb669","Type":"ContainerDied","Data":"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.458139 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" event={"ID":"49f4dba9-30ea-4a41-a1e8-e88182dbb669","Type":"ContainerDied","Data":"03877d97f0b840699066aa06e3ffb7dcf3f84adb7924965de083f734352fdba1"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.458154 4902 scope.go:117] "RemoveContainer" containerID="8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.458317 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-hrcjj" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.473564 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerStarted","Data":"d38450e1da8e6ff683b5c510372562622ddc32b4fa64193dbf0c64f238c0d0a0"} Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.490157 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-gmq8h" podStartSLOduration=4.490138976 podStartE2EDuration="4.490138976s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:45.467805348 +0000 UTC m=+1176.659114057" watchObservedRunningTime="2025-12-02 14:35:45.490138976 +0000 UTC m=+1176.681447685" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.553217 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.637998 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"] Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.659506 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-hrcjj"] Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.679928 4902 scope.go:117] "RemoveContainer" containerID="8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d" Dec 02 14:35:45 crc kubenswrapper[4902]: E1202 14:35:45.680955 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d\": container with ID starting with 8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d not found: ID does not exist" containerID="8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d" Dec 02 14:35:45 crc kubenswrapper[4902]: I1202 14:35:45.681038 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d"} err="failed to get container status \"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d\": rpc error: code = NotFound desc = could not find container \"8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d\": container with ID starting with 8a9ffb9f41367dff32c8bf3eb38037071fb34b6b8c3b883361398e6bfb0be49d not found: ID does not exist" Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.502454 4902 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerStarted","Data":"8b25b3665e321af9d35c05d1af1c71ee5a9fc0d949e1c111c8c6d0b18de35d41"} Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.515275 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerStarted","Data":"8f01cc06b45b46ab4b409325f4a7fc22f4ee0561a47535da1f67f23cca544fc0"} Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.515319 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerStarted","Data":"e181b0bcac7ab8d7b6f7c3c28df1ec4924ea1be74c9ea594c1653a25aae77143"} Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.515462 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api-log" containerID="cri-o://e181b0bcac7ab8d7b6f7c3c28df1ec4924ea1be74c9ea594c1653a25aae77143" gracePeriod=30 Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.516918 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" containerID="cri-o://8f01cc06b45b46ab4b409325f4a7fc22f4ee0561a47535da1f67f23cca544fc0" gracePeriod=30 Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.517086 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.525416 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" event={"ID":"e30c107c-9639-41d6-a4de-283d44a0c7de","Type":"ContainerStarted","Data":"41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655"} Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.525807 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.539186 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=3.539168166 podStartE2EDuration="3.539168166s" podCreationTimestamp="2025-12-02 14:35:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:46.536932513 +0000 UTC m=+1177.728241222" watchObservedRunningTime="2025-12-02 14:35:46.539168166 +0000 UTC m=+1177.730476875" Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.542234 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": EOF" Dec 02 14:35:46 crc kubenswrapper[4902]: I1202 14:35:46.564937 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" podStartSLOduration=4.56491807 podStartE2EDuration="4.56491807s" podCreationTimestamp="2025-12-02 14:35:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:35:46.556503783 +0000 UTC m=+1177.747812492" 
watchObservedRunningTime="2025-12-02 14:35:46.56491807 +0000 UTC m=+1177.756226779" Dec 02 14:35:47 crc kubenswrapper[4902]: I1202 14:35:47.123822 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49f4dba9-30ea-4a41-a1e8-e88182dbb669" path="/var/lib/kubelet/pods/49f4dba9-30ea-4a41-a1e8-e88182dbb669/volumes" Dec 02 14:35:47 crc kubenswrapper[4902]: I1202 14:35:47.554029 4902 generic.go:334] "Generic (PLEG): container finished" podID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerID="e181b0bcac7ab8d7b6f7c3c28df1ec4924ea1be74c9ea594c1653a25aae77143" exitCode=143 Dec 02 14:35:47 crc kubenswrapper[4902]: I1202 14:35:47.554101 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerDied","Data":"e181b0bcac7ab8d7b6f7c3c28df1ec4924ea1be74c9ea594c1653a25aae77143"} Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.580924 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"04345ae3-20db-4b4d-bf7d-37daf1f39660","Type":"ContainerStarted","Data":"8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1"} Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.602615 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"66b2b80e-c31b-4574-a218-26165964598a","Type":"ContainerStarted","Data":"9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a"} Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.605396 4902 generic.go:334] "Generic (PLEG): container finished" podID="9de2acde-b30a-4bb5-afdb-a56374e82d10" containerID="8b6b40627f0d46cc07acf520db3c4d594a7a2b05ef5e8024f4d7fdd316861455" exitCode=0 Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.605433 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5597" event={"ID":"9de2acde-b30a-4bb5-afdb-a56374e82d10","Type":"ContainerDied","Data":"8b6b40627f0d46cc07acf520db3c4d594a7a2b05ef5e8024f4d7fdd316861455"} Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.615582 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=3.4274148220000002 podStartE2EDuration="5.615548284s" podCreationTimestamp="2025-12-02 14:35:43 +0000 UTC" firstStartedPulling="2025-12-02 14:35:45.346300261 +0000 UTC m=+1176.537608970" lastFinishedPulling="2025-12-02 14:35:47.534433723 +0000 UTC m=+1178.725742432" observedRunningTime="2025-12-02 14:35:48.601389356 +0000 UTC m=+1179.792698065" watchObservedRunningTime="2025-12-02 14:35:48.615548284 +0000 UTC m=+1179.806856993" Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.623708 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=3.050616176 podStartE2EDuration="5.623685553s" podCreationTimestamp="2025-12-02 14:35:43 +0000 UTC" firstStartedPulling="2025-12-02 14:35:45.024145122 +0000 UTC m=+1176.215453831" lastFinishedPulling="2025-12-02 14:35:47.597214499 +0000 UTC m=+1178.788523208" observedRunningTime="2025-12-02 14:35:48.621827961 +0000 UTC m=+1179.813136670" watchObservedRunningTime="2025-12-02 14:35:48.623685553 +0000 UTC m=+1179.814994262" Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.671900 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 02 14:35:48 crc kubenswrapper[4902]: I1202 14:35:48.739543 4902 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.329301 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"] Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.368824 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"] Dec 02 14:35:50 crc kubenswrapper[4902]: E1202 14:35:50.369256 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f4dba9-30ea-4a41-a1e8-e88182dbb669" containerName="init" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.369280 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f4dba9-30ea-4a41-a1e8-e88182dbb669" containerName="init" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.370039 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f4dba9-30ea-4a41-a1e8-e88182dbb669" containerName="init" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.371054 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.375931 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.391585 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.391638 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.391867 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.392135 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6jdx\" (UniqueName: \"kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.392308 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.392369 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.392395 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.432799 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"] Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493688 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493738 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493755 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493791 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493812 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493870 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.493904 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6jdx\" (UniqueName: \"kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.496258 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.496804 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.498010 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.503737 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.505555 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.512025 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.518170 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.538451 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6jdx\" (UniqueName: \"kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx\") pod \"horizon-85bf5878d8-bn7cr\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.616808 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-54c4bbdfbb-v8pjf"] Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.623831 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.652598 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54c4bbdfbb-v8pjf"] Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.750996 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.802761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlnkt\" (UniqueName: \"kubernetes.io/projected/462ead25-02ec-4791-b927-56cf3f26ca39-kube-api-access-jlnkt\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.802835 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-tls-certs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.802885 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462ead25-02ec-4791-b927-56cf3f26ca39-logs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.802921 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-combined-ca-bundle\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.804381 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-scripts\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.804625 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-secret-key\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.804751 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-config-data\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906267 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462ead25-02ec-4791-b927-56cf3f26ca39-logs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906315 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-combined-ca-bundle\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: 
\"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906379 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-scripts\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906637 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-secret-key\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906693 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-config-data\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906735 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlnkt\" (UniqueName: \"kubernetes.io/projected/462ead25-02ec-4791-b927-56cf3f26ca39-kube-api-access-jlnkt\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906770 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-tls-certs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.906731 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462ead25-02ec-4791-b927-56cf3f26ca39-logs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.907880 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-scripts\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.910142 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/462ead25-02ec-4791-b927-56cf3f26ca39-config-data\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.912386 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-combined-ca-bundle\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.919171 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-tls-certs\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.919577 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/462ead25-02ec-4791-b927-56cf3f26ca39-horizon-secret-key\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.932170 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlnkt\" (UniqueName: \"kubernetes.io/projected/462ead25-02ec-4791-b927-56cf3f26ca39-kube-api-access-jlnkt\") pod \"horizon-54c4bbdfbb-v8pjf\" (UID: \"462ead25-02ec-4791-b927-56cf3f26ca39\") " pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:50 crc kubenswrapper[4902]: I1202 14:35:50.964048 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.054961 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.241709 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.241759 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.241826 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.241884 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.241942 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.242023 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf4ff\" (UniqueName: \"kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff\") pod \"9de2acde-b30a-4bb5-afdb-a56374e82d10\" (UID: \"9de2acde-b30a-4bb5-afdb-a56374e82d10\") " Dec 02 14:35:52 crc 
kubenswrapper[4902]: I1202 14:35:52.247831 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff" (OuterVolumeSpecName: "kube-api-access-nf4ff") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "kube-api-access-nf4ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.248518 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.251844 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.270745 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts" (OuterVolumeSpecName: "scripts") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.276119 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data" (OuterVolumeSpecName: "config-data") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.293912 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9de2acde-b30a-4bb5-afdb-a56374e82d10" (UID: "9de2acde-b30a-4bb5-afdb-a56374e82d10"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351718 4902 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351759 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351773 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351784 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351795 4902 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9de2acde-b30a-4bb5-afdb-a56374e82d10-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.351805 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf4ff\" (UniqueName: \"kubernetes.io/projected/9de2acde-b30a-4bb5-afdb-a56374e82d10-kube-api-access-nf4ff\") on node \"crc\" DevicePath \"\"" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.575836 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.658085 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.658325 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" containerID="cri-o://11e3c5ae1feb2872ebb2faa127d97f7d977b2e6386b642120c965de90fa72407" gracePeriod=10 Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.704272 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5597" event={"ID":"9de2acde-b30a-4bb5-afdb-a56374e82d10","Type":"ContainerDied","Data":"f94e598e79dc5a06a1090a1564ac3585fbf945dbfd2398bfcd2991c0cd0a5527"} Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.704313 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f94e598e79dc5a06a1090a1564ac3585fbf945dbfd2398bfcd2991c0cd0a5527" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.704379 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f5597" Dec 02 14:35:52 crc kubenswrapper[4902]: I1202 14:35:52.992029 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": read tcp 10.217.0.2:36552->10.217.0.156:9322: read: connection reset by peer" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.193322 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-f5597"] Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.201214 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-f5597"] Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.318026 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-sr89q"] Dec 02 14:35:53 crc kubenswrapper[4902]: E1202 14:35:53.318395 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9de2acde-b30a-4bb5-afdb-a56374e82d10" containerName="keystone-bootstrap" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.318412 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9de2acde-b30a-4bb5-afdb-a56374e82d10" containerName="keystone-bootstrap" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.318589 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9de2acde-b30a-4bb5-afdb-a56374e82d10" containerName="keystone-bootstrap" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.319153 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.325991 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.326041 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.326050 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.326197 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5shcs" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.330670 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374071 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374221 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374249 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9jvk\" (UniqueName: 
\"kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374297 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374399 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.374464 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.390163 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sr89q"] Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476481 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476592 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476626 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476691 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476709 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9jvk\" (UniqueName: \"kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.476744 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.488982 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.490222 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.491093 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.491246 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.496451 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.497883 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9jvk\" (UniqueName: \"kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk\") pod \"keystone-bootstrap-sr89q\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.642670 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.665556 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.706105 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.720521 4902 generic.go:334] "Generic (PLEG): container finished" podID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerID="11e3c5ae1feb2872ebb2faa127d97f7d977b2e6386b642120c965de90fa72407" exitCode=0 Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.720601 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-sl7jl" event={"ID":"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a","Type":"ContainerDied","Data":"11e3c5ae1feb2872ebb2faa127d97f7d977b2e6386b642120c965de90fa72407"} Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.724853 4902 generic.go:334] "Generic (PLEG): container finished" podID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerID="8f01cc06b45b46ab4b409325f4a7fc22f4ee0561a47535da1f67f23cca544fc0" exitCode=0 Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.724893 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerDied","Data":"8f01cc06b45b46ab4b409325f4a7fc22f4ee0561a47535da1f67f23cca544fc0"} Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.740227 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": dial tcp 10.217.0.156:9322: connect: connection refused" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.766581 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 02 14:35:53 crc kubenswrapper[4902]: I1202 14:35:53.819409 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:35:54 crc kubenswrapper[4902]: I1202 14:35:54.095845 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:54 crc kubenswrapper[4902]: I1202 14:35:54.123699 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:54 crc kubenswrapper[4902]: I1202 14:35:54.736835 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:54 crc kubenswrapper[4902]: I1202 14:35:54.791861 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 02 14:35:54 crc kubenswrapper[4902]: I1202 14:35:54.835324 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:35:55 crc kubenswrapper[4902]: I1202 14:35:55.126390 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9de2acde-b30a-4bb5-afdb-a56374e82d10" path="/var/lib/kubelet/pods/9de2acde-b30a-4bb5-afdb-a56374e82d10/volumes" Dec 02 14:35:55 crc kubenswrapper[4902]: I1202 14:35:55.555624 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" 
containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused" Dec 02 14:35:55 crc kubenswrapper[4902]: I1202 14:35:55.745963 4902 generic.go:334] "Generic (PLEG): container finished" podID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" containerID="06603acd7ccdef64ab15b6c4d400dd45df7d55e7572ab85d4c7e59e2e97f44d0" exitCode=0 Dec 02 14:35:55 crc kubenswrapper[4902]: I1202 14:35:55.746106 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8gzqw" event={"ID":"a7ca0ff1-b6ca-496c-b02e-71cc900b4433","Type":"ContainerDied","Data":"06603acd7ccdef64ab15b6c4d400dd45df7d55e7572ab85d4c7e59e2e97f44d0"} Dec 02 14:35:55 crc kubenswrapper[4902]: I1202 14:35:55.746266 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-applier-0" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" containerID="cri-o://9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" gracePeriod=30 Dec 02 14:35:56 crc kubenswrapper[4902]: I1202 14:35:56.755315 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerName="watcher-decision-engine" containerID="cri-o://8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" gracePeriod=30 Dec 02 14:35:58 crc kubenswrapper[4902]: E1202 14:35:58.667123 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:35:58 crc kubenswrapper[4902]: E1202 14:35:58.668832 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:35:58 crc kubenswrapper[4902]: E1202 14:35:58.670154 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:35:58 crc kubenswrapper[4902]: E1202 14:35:58.670229 4902 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:35:58 crc kubenswrapper[4902]: I1202 14:35:58.739823 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": dial tcp 10.217.0.156:9322: connect: connection refused" Dec 02 14:35:59 crc kubenswrapper[4902]: E1202 14:35:59.299555 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 02 14:35:59 crc kubenswrapper[4902]: E1202 14:35:59.299759 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6k2rq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-q9f9f_openstack(4666a144-4ec6-43f9-b3d9-74d0bef2e9a9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:35:59 crc kubenswrapper[4902]: E1202 14:35:59.300955 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-q9f9f" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" Dec 02 14:35:59 crc kubenswrapper[4902]: I1202 14:35:59.791847 4902 generic.go:334] "Generic (PLEG): container finished" podID="66b2b80e-c31b-4574-a218-26165964598a" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" exitCode=0 Dec 02 14:35:59 crc kubenswrapper[4902]: I1202 14:35:59.791970 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"66b2b80e-c31b-4574-a218-26165964598a","Type":"ContainerDied","Data":"9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a"} Dec 02 14:35:59 crc kubenswrapper[4902]: E1202 14:35:59.794807 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-q9f9f" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" Dec 02 14:36:00 crc kubenswrapper[4902]: I1202 14:36:00.555197 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: connect: connection refused" Dec 02 14:36:00 crc kubenswrapper[4902]: I1202 14:36:00.805949 4902 generic.go:334] "Generic (PLEG): container finished" podID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerID="8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" exitCode=0 Dec 02 14:36:00 crc kubenswrapper[4902]: I1202 14:36:00.806007 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"04345ae3-20db-4b4d-bf7d-37daf1f39660","Type":"ContainerDied","Data":"8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1"} Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.117810 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8gzqw" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.142448 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data\") pod \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.142877 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data\") pod \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.142912 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8\") pod \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.142946 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle\") pod \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\" (UID: \"a7ca0ff1-b6ca-496c-b02e-71cc900b4433\") " Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.160658 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8" (OuterVolumeSpecName: "kube-api-access-lkhf8") pod "a7ca0ff1-b6ca-496c-b02e-71cc900b4433" (UID: "a7ca0ff1-b6ca-496c-b02e-71cc900b4433"). InnerVolumeSpecName "kube-api-access-lkhf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.172856 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a7ca0ff1-b6ca-496c-b02e-71cc900b4433" (UID: "a7ca0ff1-b6ca-496c-b02e-71cc900b4433"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.181745 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7ca0ff1-b6ca-496c-b02e-71cc900b4433" (UID: "a7ca0ff1-b6ca-496c-b02e-71cc900b4433"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.236846 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data" (OuterVolumeSpecName: "config-data") pod "a7ca0ff1-b6ca-496c-b02e-71cc900b4433" (UID: "a7ca0ff1-b6ca-496c-b02e-71cc900b4433"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.245193 4902 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.245223 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-kube-api-access-lkhf8\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.245234 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.245242 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7ca0ff1-b6ca-496c-b02e-71cc900b4433-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.819344 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8gzqw" event={"ID":"a7ca0ff1-b6ca-496c-b02e-71cc900b4433","Type":"ContainerDied","Data":"c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4"} Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.819422 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9588d8760c949c80926b90deca50c47cc86bb07cd3cc238afe75b4ef17ceaa4" Dec 02 14:36:01 crc kubenswrapper[4902]: I1202 14:36:01.819540 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8gzqw" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.525964 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:02 crc kubenswrapper[4902]: E1202 14:36:02.526648 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" containerName="glance-db-sync" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.526666 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" containerName="glance-db-sync" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.526861 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" containerName="glance-db-sync" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.527844 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.545029 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579818 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579868 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579902 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579939 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjdh2\" (UniqueName: \"kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579963 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.579994 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: 
\"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683023 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjdh2\" (UniqueName: \"kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683078 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683148 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683436 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683497 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.683587 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.684113 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.684178 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.684482 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " 
pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.685071 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.685693 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.721262 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjdh2\" (UniqueName: \"kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2\") pod \"dnsmasq-dns-785d8bcb8c-99ww9\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:02 crc kubenswrapper[4902]: I1202 14:36:02.858074 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.436927 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.438769 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.440708 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-64b6w" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.440864 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.441154 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.454091 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.498936 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.498993 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.499043 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " 
pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.499083 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.499371 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.499419 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.499453 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zcwr\" (UniqueName: \"kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601230 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601276 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601325 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601359 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601414 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " 
pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601431 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601449 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zcwr\" (UniqueName: \"kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601781 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601885 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.601914 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.605717 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.608658 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.616815 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.620787 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zcwr\" (UniqueName: \"kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 
14:36:03.645536 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: E1202 14:36:03.670932 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:03 crc kubenswrapper[4902]: E1202 14:36:03.673709 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:03 crc kubenswrapper[4902]: E1202 14:36:03.674068 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:03 crc kubenswrapper[4902]: E1202 14:36:03.674117 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.677106 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.678991 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.681111 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.695878 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704633 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704709 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704749 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704774 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704819 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704941 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkkbm\" (UniqueName: \"kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.704967 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.773765 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806612 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806663 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806682 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806718 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806799 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkkbm\" (UniqueName: \"kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806817 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806854 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.806906 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.807259 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc 
kubenswrapper[4902]: I1202 14:36:03.807657 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.814495 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.815003 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.815718 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.824489 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkkbm\" (UniqueName: \"kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.844952 4902 generic.go:334] "Generic (PLEG): container finished" podID="b7c64325-9e55-496c-b643-2e053ee69bdb" containerID="354b6351255c643eebd7bf96c2d062773e0d2a686363e51f7b340b618afd4664" exitCode=0 Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.845015 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gmq8h" event={"ID":"b7c64325-9e55-496c-b643-2e053ee69bdb","Type":"ContainerDied","Data":"354b6351255c643eebd7bf96c2d062773e0d2a686363e51f7b340b618afd4664"} Dec 02 14:36:03 crc kubenswrapper[4902]: I1202 14:36:03.850948 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:04 crc kubenswrapper[4902]: I1202 14:36:04.024410 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:05 crc kubenswrapper[4902]: I1202 14:36:05.105463 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:05 crc kubenswrapper[4902]: I1202 14:36:05.168687 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:08 crc kubenswrapper[4902]: E1202 14:36:08.665491 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:08 crc kubenswrapper[4902]: E1202 14:36:08.665953 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:08 crc kubenswrapper[4902]: E1202 14:36:08.666170 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:08 crc kubenswrapper[4902]: E1202 14:36:08.666236 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:08 crc kubenswrapper[4902]: I1202 14:36:08.741250 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:36:10 crc kubenswrapper[4902]: I1202 14:36:10.556329 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 02 14:36:10 crc kubenswrapper[4902]: I1202 14:36:10.557482 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:36:13 crc kubenswrapper[4902]: E1202 14:36:13.665802 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 
14:36:13 crc kubenswrapper[4902]: E1202 14:36:13.666721 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:13 crc kubenswrapper[4902]: E1202 14:36:13.667091 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 02 14:36:13 crc kubenswrapper[4902]: E1202 14:36:13.667179 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:13 crc kubenswrapper[4902]: I1202 14:36:13.742101 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:36:14 crc kubenswrapper[4902]: E1202 14:36:14.096997 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1 is running failed: container process not found" containerID="8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 02 14:36:14 crc kubenswrapper[4902]: E1202 14:36:14.097399 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1 is running failed: container process not found" containerID="8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 02 14:36:14 crc kubenswrapper[4902]: E1202 14:36:14.097907 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1 is running failed: container process not found" containerID="8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Dec 02 14:36:14 crc kubenswrapper[4902]: E1202 14:36:14.097955 4902 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1 is running failed: container process not found" probeType="Readiness" pod="openstack/watcher-decision-engine-0" 
podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerName="watcher-decision-engine" Dec 02 14:36:15 crc kubenswrapper[4902]: I1202 14:36:15.557661 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 02 14:36:15 crc kubenswrapper[4902]: E1202 14:36:15.995918 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 02 14:36:15 crc kubenswrapper[4902]: E1202 14:36:15.996068 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r5qgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-7n2ss_openstack(9c39a4fe-20e0-458b-a226-d6f5ad9cb846): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:36:15 crc kubenswrapper[4902]: E1202 14:36:15.997233 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-7n2ss" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.143762 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.153125 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.163258 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.207556 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.264792 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle\") pod \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.264868 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca\") pod \"04345ae3-20db-4b4d-bf7d-37daf1f39660\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.264900 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle\") pod \"04345ae3-20db-4b4d-bf7d-37daf1f39660\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.264975 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs\") pod \"04345ae3-20db-4b4d-bf7d-37daf1f39660\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.264993 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4p59\" (UniqueName: \"kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59\") pod \"04345ae3-20db-4b4d-bf7d-37daf1f39660\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265050 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwvmf\" (UniqueName: \"kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf\") pod \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265094 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data\") pod \"04345ae3-20db-4b4d-bf7d-37daf1f39660\" (UID: \"04345ae3-20db-4b4d-bf7d-37daf1f39660\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265124 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs\") pod \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265164 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data\") pod \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265196 4902 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle\") pod \"b7c64325-9e55-496c-b643-2e053ee69bdb\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265217 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2fpw\" (UniqueName: \"kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw\") pod \"b7c64325-9e55-496c-b643-2e053ee69bdb\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265232 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca\") pod \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\" (UID: \"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.265257 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config\") pod \"b7c64325-9e55-496c-b643-2e053ee69bdb\" (UID: \"b7c64325-9e55-496c-b643-2e053ee69bdb\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.266060 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs" (OuterVolumeSpecName: "logs") pod "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" (UID: "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.268676 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs" (OuterVolumeSpecName: "logs") pod "04345ae3-20db-4b4d-bf7d-37daf1f39660" (UID: "04345ae3-20db-4b4d-bf7d-37daf1f39660"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.279796 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59" (OuterVolumeSpecName: "kube-api-access-c4p59") pod "04345ae3-20db-4b4d-bf7d-37daf1f39660" (UID: "04345ae3-20db-4b4d-bf7d-37daf1f39660"). InnerVolumeSpecName "kube-api-access-c4p59". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.279872 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw" (OuterVolumeSpecName: "kube-api-access-h2fpw") pod "b7c64325-9e55-496c-b643-2e053ee69bdb" (UID: "b7c64325-9e55-496c-b643-2e053ee69bdb"). InnerVolumeSpecName "kube-api-access-h2fpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.294248 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf" (OuterVolumeSpecName: "kube-api-access-zwvmf") pod "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" (UID: "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a"). InnerVolumeSpecName "kube-api-access-zwvmf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.319008 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "04345ae3-20db-4b4d-bf7d-37daf1f39660" (UID: "04345ae3-20db-4b4d-bf7d-37daf1f39660"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.321364 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" (UID: "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.323005 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7c64325-9e55-496c-b643-2e053ee69bdb" (UID: "b7c64325-9e55-496c-b643-2e053ee69bdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.338808 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" (UID: "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.347270 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data" (OuterVolumeSpecName: "config-data") pod "04345ae3-20db-4b4d-bf7d-37daf1f39660" (UID: "04345ae3-20db-4b4d-bf7d-37daf1f39660"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.356703 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04345ae3-20db-4b4d-bf7d-37daf1f39660" (UID: "04345ae3-20db-4b4d-bf7d-37daf1f39660"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.357757 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config" (OuterVolumeSpecName: "config") pod "b7c64325-9e55-496c-b643-2e053ee69bdb" (UID: "b7c64325-9e55-496c-b643-2e053ee69bdb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.367378 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb\") pod \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.367552 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7qdv\" (UniqueName: \"kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv\") pod \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.367701 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb\") pod \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.367784 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc\") pod \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.368232 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config\") pod \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\" (UID: \"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a\") " Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369182 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwvmf\" (UniqueName: \"kubernetes.io/projected/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-kube-api-access-zwvmf\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369206 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369247 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369259 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369270 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2fpw\" (UniqueName: \"kubernetes.io/projected/b7c64325-9e55-496c-b643-2e053ee69bdb-kube-api-access-h2fpw\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369282 4902 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369316 4902 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/secret/b7c64325-9e55-496c-b643-2e053ee69bdb-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369352 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369365 4902 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369403 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04345ae3-20db-4b4d-bf7d-37daf1f39660-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369415 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04345ae3-20db-4b4d-bf7d-37daf1f39660-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.369426 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4p59\" (UniqueName: \"kubernetes.io/projected/04345ae3-20db-4b4d-bf7d-37daf1f39660-kube-api-access-c4p59\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.374245 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv" (OuterVolumeSpecName: "kube-api-access-w7qdv") pod "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" (UID: "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a"). InnerVolumeSpecName "kube-api-access-w7qdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.382023 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data" (OuterVolumeSpecName: "config-data") pod "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" (UID: "d61a1a5b-671b-4c8c-9859-3bcfb4819a2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.409701 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config" (OuterVolumeSpecName: "config") pod "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" (UID: "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.427791 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" (UID: "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.429851 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" (UID: "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.438205 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" (UID: "ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470780 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470820 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470833 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7qdv\" (UniqueName: \"kubernetes.io/projected/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-kube-api-access-w7qdv\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470848 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470862 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.470874 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.977017 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.977014 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"d61a1a5b-671b-4c8c-9859-3bcfb4819a2a","Type":"ContainerDied","Data":"d38450e1da8e6ff683b5c510372562622ddc32b4fa64193dbf0c64f238c0d0a0"} Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.977164 4902 scope.go:117] "RemoveContainer" containerID="8f01cc06b45b46ab4b409325f4a7fc22f4ee0561a47535da1f67f23cca544fc0" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.978763 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"04345ae3-20db-4b4d-bf7d-37daf1f39660","Type":"ContainerDied","Data":"98bf6b62faff88b80b6be999fe63f58317aee854ab2d58d8ebca65b2a7d9fc3f"} Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.978778 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.981221 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-gmq8h" event={"ID":"b7c64325-9e55-496c-b643-2e053ee69bdb","Type":"ContainerDied","Data":"c25701d6c369d37488f1f95ca2935cea75573491e0aabf49d54b954936f9fcfc"} Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.981286 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c25701d6c369d37488f1f95ca2935cea75573491e0aabf49d54b954936f9fcfc" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.981368 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-gmq8h" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.986458 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-sl7jl" Dec 02 14:36:16 crc kubenswrapper[4902]: E1202 14:36:16.986913 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-7n2ss" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" Dec 02 14:36:16 crc kubenswrapper[4902]: I1202 14:36:16.987032 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-sl7jl" event={"ID":"ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a","Type":"ContainerDied","Data":"4e46659c2ab8a4fede5b3bc9469ab35752d26a0dd57b4c169cceafff0b21508e"} Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.039432 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.054957 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.064656 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.079644 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-sl7jl"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095052 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.095464 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="init" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095482 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="init" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.095495 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095502 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.095515 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c64325-9e55-496c-b643-2e053ee69bdb" containerName="neutron-db-sync" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095520 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c64325-9e55-496c-b643-2e053ee69bdb" containerName="neutron-db-sync" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.095540 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api-log" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095546 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api-log" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.095559 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerName="watcher-decision-engine" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095585 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerName="watcher-decision-engine" Dec 02 14:36:17 crc 
kubenswrapper[4902]: E1202 14:36:17.095594 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095601 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095775 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" containerName="watcher-decision-engine" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095792 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7c64325-9e55-496c-b643-2e053ee69bdb" containerName="neutron-db-sync" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095803 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api-log" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095818 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.095828 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.096818 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.102443 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.102564 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.124553 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" path="/var/lib/kubelet/pods/d61a1a5b-671b-4c8c-9859-3bcfb4819a2a/volumes" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.125893 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" path="/var/lib/kubelet/pods/ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a/volumes" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.126623 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.126645 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.132323 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.133678 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.136441 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.142653 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.185970 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvfzn\" (UniqueName: \"kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.186015 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.186231 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.186366 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.186399 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287512 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287572 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287648 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287675 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287716 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99lhh\" (UniqueName: \"kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287801 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvfzn\" (UniqueName: \"kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287834 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287913 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.287941 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.301648 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.307264 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.311162 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs\") pod \"watcher-api-0\" (UID: 
\"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.312042 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvfzn\" (UniqueName: \"kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.323280 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data\") pod \"watcher-api-0\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") " pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.388333 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389282 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99lhh\" (UniqueName: \"kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389432 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389455 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389500 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.389895 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.401938 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " 
pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.402001 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.402202 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.414698 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.431523 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.444241 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99lhh\" (UniqueName: \"kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh\") pod \"watcher-decision-engine-0\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") " pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.446727 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.450203 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.453480 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.498001 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.504131 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.506196 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.508302 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-n6cc5" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.508310 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.508893 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.554299 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"] Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.599135 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.599180 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsmpm\" (UniqueName: \"kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.599226 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.599307 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.599513 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600091 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600277 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600434 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600613 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nntb\" (UniqueName: \"kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600640 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.600724 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702277 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702398 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nntb\" (UniqueName: \"kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702418 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702447 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702472 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702489 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsmpm\" (UniqueName: \"kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702504 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702517 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702550 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.702602 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.703652 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.707357 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.707425 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: 
\"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.708018 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.708716 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.708750 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.713482 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.722582 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.727154 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsmpm\" (UniqueName: \"kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.733977 4902 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.734140 4902 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w9rsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-rgnm2_openstack(52b3cd40-3726-4b52-8e64-8b15f5f02a99): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.734456 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nntb\" (UniqueName: \"kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb\") pod \"dnsmasq-dns-55f844cf75-5qhnq\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") " pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: E1202 14:36:17.735356 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-rgnm2" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.736093 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config\") pod \"neutron-75bd8f464d-cnfhg\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 
14:36:17.776954 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.830549 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.840043 4902 scope.go:117] "RemoveContainer" containerID="e181b0bcac7ab8d7b6f7c3c28df1ec4924ea1be74c9ea594c1653a25aae77143" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.853686 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.906067 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs\") pod \"66b2b80e-c31b-4574-a218-26165964598a\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.906504 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvr6g\" (UniqueName: \"kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g\") pod \"66b2b80e-c31b-4574-a218-26165964598a\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.906532 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data\") pod \"66b2b80e-c31b-4574-a218-26165964598a\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.906641 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle\") pod \"66b2b80e-c31b-4574-a218-26165964598a\" (UID: \"66b2b80e-c31b-4574-a218-26165964598a\") " Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.906907 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs" (OuterVolumeSpecName: "logs") pod "66b2b80e-c31b-4574-a218-26165964598a" (UID: "66b2b80e-c31b-4574-a218-26165964598a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.907354 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b2b80e-c31b-4574-a218-26165964598a-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.918016 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g" (OuterVolumeSpecName: "kube-api-access-jvr6g") pod "66b2b80e-c31b-4574-a218-26165964598a" (UID: "66b2b80e-c31b-4574-a218-26165964598a"). InnerVolumeSpecName "kube-api-access-jvr6g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.966032 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66b2b80e-c31b-4574-a218-26165964598a" (UID: "66b2b80e-c31b-4574-a218-26165964598a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:17 crc kubenswrapper[4902]: I1202 14:36:17.985750 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data" (OuterVolumeSpecName: "config-data") pod "66b2b80e-c31b-4574-a218-26165964598a" (UID: "66b2b80e-c31b-4574-a218-26165964598a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.009305 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.009332 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvr6g\" (UniqueName: \"kubernetes.io/projected/66b2b80e-c31b-4574-a218-26165964598a-kube-api-access-jvr6g\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.009344 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b2b80e-c31b-4574-a218-26165964598a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.012157 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"66b2b80e-c31b-4574-a218-26165964598a","Type":"ContainerDied","Data":"4babe4e537278e0c4b196841a400e2ea5f67113835056149df808460ac3589e3"} Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.012261 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: E1202 14:36:18.048739 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-rgnm2" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.100151 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.103005 4902 scope.go:117] "RemoveContainer" containerID="8e3222a115913417d880a13e1e6de805cca664914504a7ab6a851207d1afcee1" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.123176 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.133374 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:36:18 crc kubenswrapper[4902]: E1202 14:36:18.133926 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.133941 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.134127 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b2b80e-c31b-4574-a218-26165964598a" containerName="watcher-applier" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.134815 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.138554 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.148371 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.212540 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/644d8761-1b3a-4a6f-922f-596ae390bacf-logs\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.212790 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-config-data\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.212847 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkq5t\" (UniqueName: \"kubernetes.io/projected/644d8761-1b3a-4a6f-922f-596ae390bacf-kube-api-access-jkq5t\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.213042 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.314937 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-config-data\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.314994 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkq5t\" (UniqueName: \"kubernetes.io/projected/644d8761-1b3a-4a6f-922f-596ae390bacf-kube-api-access-jkq5t\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.316071 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.316144 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/644d8761-1b3a-4a6f-922f-596ae390bacf-logs\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.316515 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/644d8761-1b3a-4a6f-922f-596ae390bacf-logs\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.323215 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.323974 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/644d8761-1b3a-4a6f-922f-596ae390bacf-config-data\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.334762 4902 scope.go:117] "RemoveContainer" containerID="11e3c5ae1feb2872ebb2faa127d97f7d977b2e6386b642120c965de90fa72407" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.339754 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkq5t\" (UniqueName: \"kubernetes.io/projected/644d8761-1b3a-4a6f-922f-596ae390bacf-kube-api-access-jkq5t\") pod \"watcher-applier-0\" (UID: \"644d8761-1b3a-4a6f-922f-596ae390bacf\") " pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.371721 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-54c4bbdfbb-v8pjf"] Dec 02 14:36:18 crc kubenswrapper[4902]: W1202 14:36:18.386406 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod462ead25_02ec_4791_b927_56cf3f26ca39.slice/crio-2cfb91027f853102de8650c54484d735dccbfcab3c23005462f938a1536d4a8a WatchSource:0}: Error finding container 2cfb91027f853102de8650c54484d735dccbfcab3c23005462f938a1536d4a8a: Status 404 returned error can't find the container with id 2cfb91027f853102de8650c54484d735dccbfcab3c23005462f938a1536d4a8a Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.404762 4902 scope.go:117] "RemoveContainer" containerID="e3a8ee5f87c5570cbacf6f2528a8af9027bfaff15fe1940fefec9f0f5d2646fe" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.542025 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.597583 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.624149 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.646398 4902 scope.go:117] "RemoveContainer" containerID="9878ef1a9b61b8e54142d38b6595017c70817d23fdd6406354f6b29689bb975a" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.650913 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sr89q"] Dec 02 14:36:18 crc kubenswrapper[4902]: W1202 14:36:18.662144 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6eab04a1_6320_449e_9f31_600aa46a57b0.slice/crio-6eab603030d2831bb48c4d8a43b8ef55e3192edcf14c9d5797183544edbbf777 WatchSource:0}: Error finding container 6eab603030d2831bb48c4d8a43b8ef55e3192edcf14c9d5797183544edbbf777: Status 404 returned error can't find the container with id 6eab603030d2831bb48c4d8a43b8ef55e3192edcf14c9d5797183544edbbf777 Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.727845 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.735553 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 14:36:18 crc kubenswrapper[4902]: I1202 14:36:18.747832 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="d61a1a5b-671b-4c8c-9859-3bcfb4819a2a" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.156:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:36:18 crc kubenswrapper[4902]: W1202 14:36:18.762889 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e6b2ee2_3aac_4805_afc2_72632b4b82ab.slice/crio-1a142c6f6a3d3cb88e92f474af1279e7ae0c01c5992cb078e654b39e8de5a63c WatchSource:0}: Error finding container 1a142c6f6a3d3cb88e92f474af1279e7ae0c01c5992cb078e654b39e8de5a63c: Status 404 returned error can't find the container with id 1a142c6f6a3d3cb88e92f474af1279e7ae0c01c5992cb078e654b39e8de5a63c Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.053185 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerStarted","Data":"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.082721 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" event={"ID":"29642134-e5dd-44f7-aa97-a3b70af91779","Type":"ContainerStarted","Data":"42a397a4193aca22f30070652fe362654bf7c6d5333472a5550e9dd093e85633"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.135762 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04345ae3-20db-4b4d-bf7d-37daf1f39660" path="/var/lib/kubelet/pods/04345ae3-20db-4b4d-bf7d-37daf1f39660/volumes" Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.142014 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b2b80e-c31b-4574-a218-26165964598a" 
path="/var/lib/kubelet/pods/66b2b80e-c31b-4574-a218-26165964598a/volumes" Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.143452 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerStarted","Data":"6f7e670709e8e0842994de3af86ecc3e1d12533a9c5031a5d8ac8b6a608bbf2c"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.146160 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"] Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.151905 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerStarted","Data":"bd75a7da1a4e580e605781dffd69856bad4b9b2e0d9ea492f0939f90a56b60c1"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.162647 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.170749 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54c4bbdfbb-v8pjf" event={"ID":"462ead25-02ec-4791-b927-56cf3f26ca39","Type":"ContainerStarted","Data":"2cfb91027f853102de8650c54484d735dccbfcab3c23005462f938a1536d4a8a"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.192700 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sr89q" event={"ID":"082d182d-ad5d-4d70-ab12-bac95950cc73","Type":"ContainerStarted","Data":"f8c26fc2837ce2bb2ec7399ba6c471d92599f6bef334de27cac20422282997fd"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.207191 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerStarted","Data":"6eab603030d2831bb48c4d8a43b8ef55e3192edcf14c9d5797183544edbbf777"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.213213 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerStarted","Data":"1a142c6f6a3d3cb88e92f474af1279e7ae0c01c5992cb078e654b39e8de5a63c"} Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.318988 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.329255 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.337874 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:19 crc kubenswrapper[4902]: W1202 14:36:19.401935 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6728665d_94d1_4dae_904e_17a2c82959be.slice/crio-76842f5ef2e6c4d103ee7c43329028506e8f7dfd74252bcff0d89474f7b2083d WatchSource:0}: Error finding container 76842f5ef2e6c4d103ee7c43329028506e8f7dfd74252bcff0d89474f7b2083d: Status 404 returned error can't find the container with id 76842f5ef2e6c4d103ee7c43329028506e8f7dfd74252bcff0d89474f7b2083d Dec 02 14:36:19 crc kubenswrapper[4902]: I1202 14:36:19.433540 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"] Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.025650 4902 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-7bffdc6c65-srhkc"] Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.027831 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.031509 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.031749 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.054824 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7bffdc6c65-srhkc"] Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066069 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-httpd-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066114 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066166 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-internal-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066232 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-ovndb-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066290 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-public-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066310 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl7tx\" (UniqueName: \"kubernetes.io/projected/04c855a2-1a97-4b81-83c2-d3b51678293d-kube-api-access-kl7tx\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.066331 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-combined-ca-bundle\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc 
kubenswrapper[4902]: I1202 14:36:20.167881 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-internal-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.167974 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-ovndb-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.168063 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-public-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.168091 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl7tx\" (UniqueName: \"kubernetes.io/projected/04c855a2-1a97-4b81-83c2-d3b51678293d-kube-api-access-kl7tx\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.168117 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-combined-ca-bundle\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.168258 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-httpd-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.168291 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.197756 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-httpd-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.208443 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-config\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.210032 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-public-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.212763 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl7tx\" (UniqueName: \"kubernetes.io/projected/04c855a2-1a97-4b81-83c2-d3b51678293d-kube-api-access-kl7tx\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.213307 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-internal-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.213498 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-ovndb-tls-certs\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.242322 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c855a2-1a97-4b81-83c2-d3b51678293d-combined-ca-bundle\") pod \"neutron-7bffdc6c65-srhkc\" (UID: \"04c855a2-1a97-4b81-83c2-d3b51678293d\") " pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.286816 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerStarted","Data":"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.302296 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerStarted","Data":"33412b5da06aa1e05f0df1831faab11b06392acbf86a87db681ca464c786f37e"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.308734 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"644d8761-1b3a-4a6f-922f-596ae390bacf","Type":"ContainerStarted","Data":"7be1681bbfefb2668b97c6a53cc524aea4a915b237d38e7122f02b54aadfff15"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.319995 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q9f9f" event={"ID":"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9","Type":"ContainerStarted","Data":"c570b9171f63f7ba00919ddd33666fbba8786ee70486ddf871a2ec9d2310126e"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.340807 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerStarted","Data":"9eedcce42209e0a9f4b55b6908860a717e63479586c12d555412f4a2fa66db48"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.362387 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-q9f9f" podStartSLOduration=5.255005885 podStartE2EDuration="39.36236079s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" 
firstStartedPulling="2025-12-02 14:35:43.740834815 +0000 UTC m=+1174.932143524" lastFinishedPulling="2025-12-02 14:36:17.84818972 +0000 UTC m=+1209.039498429" observedRunningTime="2025-12-02 14:36:20.348894041 +0000 UTC m=+1211.540202750" watchObservedRunningTime="2025-12-02 14:36:20.36236079 +0000 UTC m=+1211.553669489" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.370775 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerStarted","Data":"2236e2125cfd0dd2462179437852250a37fae12330f0da057403262ac6598fdc"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.370930 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6cdbd8859c-g8rmc" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon-log" containerID="cri-o://bd75a7da1a4e580e605781dffd69856bad4b9b2e0d9ea492f0939f90a56b60c1" gracePeriod=30 Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.371323 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6cdbd8859c-g8rmc" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon" containerID="cri-o://2236e2125cfd0dd2462179437852250a37fae12330f0da057403262ac6598fdc" gracePeriod=30 Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.387300 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54c4bbdfbb-v8pjf" event={"ID":"462ead25-02ec-4791-b927-56cf3f26ca39","Type":"ContainerStarted","Data":"de29adb1137f24d5d2936ed5a1079db53836ea33437a8a78d0e90e5177bad004"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.389240 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67dc251-e88f-4954-9410-bacba8c2b367","Type":"ContainerStarted","Data":"70162a8bebd26998d9d9e8dd26ecdea4853eb054266cd648540c36f86a3d229c"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.390297 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" event={"ID":"8fbb65ec-2bd4-480c-8c37-106cad501bb4","Type":"ContainerStarted","Data":"7e59011e8fde7da3d351321d186590ff8dab3e2c42b836cc4dc53dc331d3fb85"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.391065 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerStarted","Data":"d06237c0441a8bcfb905045362575a3a55a04184ec79fbc57e47bf77ced2758d"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.392084 4902 generic.go:334] "Generic (PLEG): container finished" podID="29642134-e5dd-44f7-aa97-a3b70af91779" containerID="cf73934bd7d5e1e42bda4dddb455ee05db879a1243f1136360db4919adabe08f" exitCode=0 Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.392122 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" event={"ID":"29642134-e5dd-44f7-aa97-a3b70af91779","Type":"ContainerDied","Data":"cf73934bd7d5e1e42bda4dddb455ee05db879a1243f1136360db4919adabe08f"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.394733 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerStarted","Data":"76842f5ef2e6c4d103ee7c43329028506e8f7dfd74252bcff0d89474f7b2083d"} Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.417842 4902 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6cdbd8859c-g8rmc" podStartSLOduration=7.111692105 podStartE2EDuration="39.417630544s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="2025-12-02 14:35:43.706537651 +0000 UTC m=+1174.897846360" lastFinishedPulling="2025-12-02 14:36:16.01247609 +0000 UTC m=+1207.203784799" observedRunningTime="2025-12-02 14:36:20.405091812 +0000 UTC m=+1211.596400521" watchObservedRunningTime="2025-12-02 14:36:20.417630544 +0000 UTC m=+1211.608939253" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.460280 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:20 crc kubenswrapper[4902]: I1202 14:36:20.558422 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-sl7jl" podUID="ee866db4-a0e7-4aad-8a8f-cd0e81b36c8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.120:5353: i/o timeout" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.408697 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerStarted","Data":"d71cc061eb90f6c52cc70e264560970e0903d0adc87c344cb8b96c161dbab470"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.408827 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-ff5b6d47-dgpgd" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon-log" containerID="cri-o://6f7e670709e8e0842994de3af86ecc3e1d12533a9c5031a5d8ac8b6a608bbf2c" gracePeriod=30 Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.408888 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-ff5b6d47-dgpgd" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon" containerID="cri-o://d71cc061eb90f6c52cc70e264560970e0903d0adc87c344cb8b96c161dbab470" gracePeriod=30 Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.414494 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"644d8761-1b3a-4a6f-922f-596ae390bacf","Type":"ContainerStarted","Data":"33b20932e61889c280d2d8c1307a356dcd9927b7905eb1e01f54f000ec313caf"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.420892 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerStarted","Data":"31fa4fd7dccceddb5792c84cf234eebaa6816a9d8673b80e34cbc681a2ef4189"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.421039 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6fc8fd9965-7vwnt" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon-log" containerID="cri-o://9eedcce42209e0a9f4b55b6908860a717e63479586c12d555412f4a2fa66db48" gracePeriod=30 Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.421119 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6fc8fd9965-7vwnt" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon" containerID="cri-o://31fa4fd7dccceddb5792c84cf234eebaa6816a9d8673b80e34cbc681a2ef4189" gracePeriod=30 Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.432294 4902 generic.go:334] "Generic (PLEG): container finished" podID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" 
containerID="8dce742eede555127ac5370d816e48082c2ca90d67ac6c41d67d926ece1b2e5f" exitCode=0 Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.432399 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" event={"ID":"8fbb65ec-2bd4-480c-8c37-106cad501bb4","Type":"ContainerDied","Data":"8dce742eede555127ac5370d816e48082c2ca90d67ac6c41d67d926ece1b2e5f"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.439710 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sr89q" event={"ID":"082d182d-ad5d-4d70-ab12-bac95950cc73","Type":"ContainerStarted","Data":"f989e94db4d2c2cba09db820d4f65afbabc25c5810021646cdafbe57c6a7c61c"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.439944 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-ff5b6d47-dgpgd" podStartSLOduration=6.339167803 podStartE2EDuration="40.439922462s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="2025-12-02 14:35:43.704543645 +0000 UTC m=+1174.895852354" lastFinishedPulling="2025-12-02 14:36:17.805298314 +0000 UTC m=+1208.996607013" observedRunningTime="2025-12-02 14:36:21.432159594 +0000 UTC m=+1212.623468303" watchObservedRunningTime="2025-12-02 14:36:21.439922462 +0000 UTC m=+1212.631231171" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.441555 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerStarted","Data":"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.442643 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerStarted","Data":"206f480261c17ee366d079c8311055bd4dabd360c1483c60b9639af5d0fcbc62"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.443750 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67dc251-e88f-4954-9410-bacba8c2b367","Type":"ContainerStarted","Data":"be95115dbb74b92e24593b3465532ac2427dfbfeb51637d1ba0d5899244232ff"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.445415 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerStarted","Data":"cd543239bb5ae8531db62adad1d8d8f0c61a914abc1d451008d1786b18637cff"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.450151 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerStarted","Data":"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.461951 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-54c4bbdfbb-v8pjf" event={"ID":"462ead25-02ec-4791-b927-56cf3f26ca39","Type":"ContainerStarted","Data":"81aabead2d395eea485ee636b9d1f19b73aabfde885dbcdbbd3f41956401fb29"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.461950 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=3.461931691 podStartE2EDuration="3.461931691s" podCreationTimestamp="2025-12-02 14:36:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:21.456606191 +0000 UTC m=+1212.647914900" watchObservedRunningTime="2025-12-02 14:36:21.461931691 +0000 UTC m=+1212.653240390" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.475160 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerStarted","Data":"bf3fb75f250cc342eec18e2f28caacec1d7472e4d5f57c68c053af5acaebc259"} Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.480630 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6fc8fd9965-7vwnt" podStartSLOduration=5.363449716 podStartE2EDuration="37.480612996s" podCreationTimestamp="2025-12-02 14:35:44 +0000 UTC" firstStartedPulling="2025-12-02 14:35:45.732405759 +0000 UTC m=+1176.923714468" lastFinishedPulling="2025-12-02 14:36:17.849569039 +0000 UTC m=+1209.040877748" observedRunningTime="2025-12-02 14:36:21.476965064 +0000 UTC m=+1212.668273773" watchObservedRunningTime="2025-12-02 14:36:21.480612996 +0000 UTC m=+1212.671921705" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.503401 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-85bf5878d8-bn7cr" podStartSLOduration=31.503386066 podStartE2EDuration="31.503386066s" podCreationTimestamp="2025-12-02 14:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:21.503035117 +0000 UTC m=+1212.694343886" watchObservedRunningTime="2025-12-02 14:36:21.503386066 +0000 UTC m=+1212.694694775" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.540821 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=4.540801129 podStartE2EDuration="4.540801129s" podCreationTimestamp="2025-12-02 14:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:21.518234434 +0000 UTC m=+1212.709543143" watchObservedRunningTime="2025-12-02 14:36:21.540801129 +0000 UTC m=+1212.732109838" Dec 02 14:36:21 crc kubenswrapper[4902]: I1202 14:36:21.543151 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-54c4bbdfbb-v8pjf" podStartSLOduration=31.543144255 podStartE2EDuration="31.543144255s" podCreationTimestamp="2025-12-02 14:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:21.54013959 +0000 UTC m=+1212.731448299" watchObservedRunningTime="2025-12-02 14:36:21.543144255 +0000 UTC m=+1212.734452964" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.049677 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.277892 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.325178 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-sr89q" podStartSLOduration=29.325137315 podStartE2EDuration="29.325137315s" podCreationTimestamp="2025-12-02 14:35:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:21.626873319 +0000 UTC m=+1212.818182028" watchObservedRunningTime="2025-12-02 14:36:22.325137315 +0000 UTC m=+1213.516446024" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357448 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357516 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357672 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357769 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357840 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjdh2\" (UniqueName: \"kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.357920 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb\") pod \"29642134-e5dd-44f7-aa97-a3b70af91779\" (UID: \"29642134-e5dd-44f7-aa97-a3b70af91779\") " Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.371741 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2" (OuterVolumeSpecName: "kube-api-access-mjdh2") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "kube-api-access-mjdh2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.399353 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.432146 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.462931 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjdh2\" (UniqueName: \"kubernetes.io/projected/29642134-e5dd-44f7-aa97-a3b70af91779-kube-api-access-mjdh2\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.462971 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.462980 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.469359 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config" (OuterVolumeSpecName: "config") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.493989 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.499519 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.514508 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29642134-e5dd-44f7-aa97-a3b70af91779" (UID: "29642134-e5dd-44f7-aa97-a3b70af91779"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.532546 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" event={"ID":"29642134-e5dd-44f7-aa97-a3b70af91779","Type":"ContainerDied","Data":"42a397a4193aca22f30070652fe362654bf7c6d5333472a5550e9dd093e85633"} Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.532622 4902 scope.go:117] "RemoveContainer" containerID="cf73934bd7d5e1e42bda4dddb455ee05db879a1243f1136360db4919adabe08f" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.533295 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-99ww9" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.566206 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.566233 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.566242 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/29642134-e5dd-44f7-aa97-a3b70af91779-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.709664 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.801410 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-99ww9"] Dec 02 14:36:22 crc kubenswrapper[4902]: I1202 14:36:22.921384 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7bffdc6c65-srhkc"] Dec 02 14:36:22 crc kubenswrapper[4902]: W1202 14:36:22.994746 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04c855a2_1a97_4b81_83c2_d3b51678293d.slice/crio-4e761fad0f598d99df2f96944069dae566295cff01f07e27c84abb5c98e5d7bb WatchSource:0}: Error finding container 4e761fad0f598d99df2f96944069dae566295cff01f07e27c84abb5c98e5d7bb: Status 404 returned error can't find the container with id 4e761fad0f598d99df2f96944069dae566295cff01f07e27c84abb5c98e5d7bb Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.153925 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29642134-e5dd-44f7-aa97-a3b70af91779" path="/var/lib/kubelet/pods/29642134-e5dd-44f7-aa97-a3b70af91779/volumes" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.543670 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.569933 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerStarted","Data":"0ee92cbb2b962b3eb4f659056f499805f2f16ecd03907db9f3543bfc00f7fce5"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.570002 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.575176 4902 generic.go:334] "Generic (PLEG): container finished" 
podID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" containerID="c570b9171f63f7ba00919ddd33666fbba8786ee70486ddf871a2ec9d2310126e" exitCode=0 Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.575243 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q9f9f" event={"ID":"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9","Type":"ContainerDied","Data":"c570b9171f63f7ba00919ddd33666fbba8786ee70486ddf871a2ec9d2310126e"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.583045 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" event={"ID":"8fbb65ec-2bd4-480c-8c37-106cad501bb4","Type":"ContainerStarted","Data":"4777eb6fa257da588902e78b521c57efea970722d0f164fb8dc5613243aad33d"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.583174 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.587033 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerStarted","Data":"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.587219 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.591587 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=6.591562798 podStartE2EDuration="6.591562798s" podCreationTimestamp="2025-12-02 14:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:23.584710225 +0000 UTC m=+1214.776018934" watchObservedRunningTime="2025-12-02 14:36:23.591562798 +0000 UTC m=+1214.782871507" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.596699 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerStarted","Data":"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.606392 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bffdc6c65-srhkc" event={"ID":"04c855a2-1a97-4b81-83c2-d3b51678293d","Type":"ContainerStarted","Data":"74e1529acabcb46d39afa5ea6f6e70dd36b992e4a4463242371e841a776c0ae4"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.606425 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bffdc6c65-srhkc" event={"ID":"04c855a2-1a97-4b81-83c2-d3b51678293d","Type":"ContainerStarted","Data":"4e761fad0f598d99df2f96944069dae566295cff01f07e27c84abb5c98e5d7bb"} Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.617412 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" podStartSLOduration=6.617394073 podStartE2EDuration="6.617394073s" podCreationTimestamp="2025-12-02 14:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:23.60763674 +0000 UTC m=+1214.798945449" watchObservedRunningTime="2025-12-02 14:36:23.617394073 +0000 UTC m=+1214.808702782" Dec 02 14:36:23 crc kubenswrapper[4902]: I1202 14:36:23.650983 4902 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-75bd8f464d-cnfhg" podStartSLOduration=6.650962377 podStartE2EDuration="6.650962377s" podCreationTimestamp="2025-12-02 14:36:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:23.643104336 +0000 UTC m=+1214.834413045" watchObservedRunningTime="2025-12-02 14:36:23.650962377 +0000 UTC m=+1214.842271086" Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.615748 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerStarted","Data":"e6cf738cabd31ebf93d40aea121eed14157de70783a31b6bcd8f0ea1c3865380"} Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.615893 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-log" containerID="cri-o://cd543239bb5ae8531db62adad1d8d8f0c61a914abc1d451008d1786b18637cff" gracePeriod=30 Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.616206 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-httpd" containerID="cri-o://e6cf738cabd31ebf93d40aea121eed14157de70783a31b6bcd8f0ea1c3865380" gracePeriod=30 Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.619437 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bffdc6c65-srhkc" event={"ID":"04c855a2-1a97-4b81-83c2-d3b51678293d","Type":"ContainerStarted","Data":"ac2f2c682f07908637a6f72435bf033450d666649210ab1aca6155e7fea8584e"} Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.619908 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.621850 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-log" containerID="cri-o://bf3fb75f250cc342eec18e2f28caacec1d7472e4d5f57c68c053af5acaebc259" gracePeriod=30 Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.621928 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerStarted","Data":"b9c16cac248c0f08e30858bb10969b3a47486a5dd35987033e74e61223c0ea18"} Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.623693 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-httpd" containerID="cri-o://b9c16cac248c0f08e30858bb10969b3a47486a5dd35987033e74e61223c0ea18" gracePeriod=30 Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.641825 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.674170 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7bffdc6c65-srhkc" podStartSLOduration=5.67414893 podStartE2EDuration="5.67414893s" podCreationTimestamp="2025-12-02 14:36:19 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:24.669399757 +0000 UTC m=+1215.860708466" watchObservedRunningTime="2025-12-02 14:36:24.67414893 +0000 UTC m=+1215.865457639" Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.683359 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=22.683332518 podStartE2EDuration="22.683332518s" podCreationTimestamp="2025-12-02 14:36:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:24.636173342 +0000 UTC m=+1215.827482061" watchObservedRunningTime="2025-12-02 14:36:24.683332518 +0000 UTC m=+1215.874641237" Dec 02 14:36:24 crc kubenswrapper[4902]: I1202 14:36:24.698339 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=22.69832171 podStartE2EDuration="22.69832171s" podCreationTimestamp="2025-12-02 14:36:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:24.692091255 +0000 UTC m=+1215.883399964" watchObservedRunningTime="2025-12-02 14:36:24.69832171 +0000 UTC m=+1215.889630419" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.190761 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-q9f9f" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.351200 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data\") pod \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.351342 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts\") pod \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.351459 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k2rq\" (UniqueName: \"kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq\") pod \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.351492 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs\") pod \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.351555 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle\") pod \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\" (UID: \"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9\") " Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.352704 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs" (OuterVolumeSpecName: "logs") pod 
"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" (UID: "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.360732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts" (OuterVolumeSpecName: "scripts") pod "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" (UID: "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.374809 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq" (OuterVolumeSpecName: "kube-api-access-6k2rq") pod "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" (UID: "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9"). InnerVolumeSpecName "kube-api-access-6k2rq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.431686 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" (UID: "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.446714 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data" (OuterVolumeSpecName: "config-data") pod "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" (UID: "4666a144-4ec6-43f9-b3d9-74d0bef2e9a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.456579 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k2rq\" (UniqueName: \"kubernetes.io/projected/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-kube-api-access-6k2rq\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.456626 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.456638 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.456647 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.456658 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.662133 4902 generic.go:334] "Generic (PLEG): container finished" podID="6728665d-94d1-4dae-904e-17a2c82959be" containerID="b9c16cac248c0f08e30858bb10969b3a47486a5dd35987033e74e61223c0ea18" exitCode=0 Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.662190 4902 generic.go:334] "Generic (PLEG): container finished" podID="6728665d-94d1-4dae-904e-17a2c82959be" containerID="bf3fb75f250cc342eec18e2f28caacec1d7472e4d5f57c68c053af5acaebc259" exitCode=143 Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.662290 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerDied","Data":"b9c16cac248c0f08e30858bb10969b3a47486a5dd35987033e74e61223c0ea18"} Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.662332 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerDied","Data":"bf3fb75f250cc342eec18e2f28caacec1d7472e4d5f57c68c053af5acaebc259"} Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.700162 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-q9f9f" event={"ID":"4666a144-4ec6-43f9-b3d9-74d0bef2e9a9","Type":"ContainerDied","Data":"ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca"} Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.700203 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ced16ccc86506224bbdde8e9d114cb6a88c40d619af634fce57f0665f9e8fcca" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.700271 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-q9f9f" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.714784 4902 generic.go:334] "Generic (PLEG): container finished" podID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerID="e6cf738cabd31ebf93d40aea121eed14157de70783a31b6bcd8f0ea1c3865380" exitCode=0 Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.715147 4902 generic.go:334] "Generic (PLEG): container finished" podID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerID="cd543239bb5ae8531db62adad1d8d8f0c61a914abc1d451008d1786b18637cff" exitCode=143 Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.716088 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerDied","Data":"e6cf738cabd31ebf93d40aea121eed14157de70783a31b6bcd8f0ea1c3865380"} Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.716121 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerDied","Data":"cd543239bb5ae8531db62adad1d8d8f0c61a914abc1d451008d1786b18637cff"} Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.775231 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-786fb786b8-w8tbv"] Dec 02 14:36:25 crc kubenswrapper[4902]: E1202 14:36:25.775787 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29642134-e5dd-44f7-aa97-a3b70af91779" containerName="init" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.775799 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="29642134-e5dd-44f7-aa97-a3b70af91779" containerName="init" Dec 02 14:36:25 crc kubenswrapper[4902]: E1202 14:36:25.775821 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" containerName="placement-db-sync" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.775828 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" containerName="placement-db-sync" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.776024 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="29642134-e5dd-44f7-aa97-a3b70af91779" containerName="init" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.776040 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" containerName="placement-db-sync" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.777396 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.780467 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gfvx4" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.780621 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.782359 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.783600 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.783822 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.847691 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-786fb786b8-w8tbv"] Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868352 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-public-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868472 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-config-data\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868523 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-internal-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868552 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d44gz\" (UniqueName: \"kubernetes.io/projected/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-kube-api-access-d44gz\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868616 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-logs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868638 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-scripts\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.868708 4902 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-combined-ca-bundle\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970209 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-combined-ca-bundle\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970296 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-public-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970330 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-config-data\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970369 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-internal-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970384 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d44gz\" (UniqueName: \"kubernetes.io/projected/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-kube-api-access-d44gz\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970410 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-logs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.970426 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-scripts\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.978442 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-logs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:25 crc kubenswrapper[4902]: I1202 14:36:25.984610 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-public-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.013225 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-scripts\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.015204 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-config-data\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.017612 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-internal-tls-certs\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.018019 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d44gz\" (UniqueName: \"kubernetes.io/projected/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-kube-api-access-d44gz\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.020392 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3-combined-ca-bundle\") pod \"placement-786fb786b8-w8tbv\" (UID: \"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3\") " pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.133101 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.155935 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.192246 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.278990 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.280418 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkkbm\" (UniqueName: \"kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.280966 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.281006 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.281071 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.281101 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.281120 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run\") pod \"6728665d-94d1-4dae-904e-17a2c82959be\" (UID: \"6728665d-94d1-4dae-904e-17a2c82959be\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.282324 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs" (OuterVolumeSpecName: "logs") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.282869 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.287552 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm" (OuterVolumeSpecName: "kube-api-access-xkkbm") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "kube-api-access-xkkbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.295683 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts" (OuterVolumeSpecName: "scripts") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.297994 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.340649 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.382522 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383126 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383359 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383425 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zcwr\" (UniqueName: \"kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383447 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383826 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383893 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.383917 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\" (UID: \"9e6b2ee2-3aac-4805-afc2-72632b4b82ab\") " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.384399 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs" (OuterVolumeSpecName: "logs") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385540 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385554 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385573 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385581 4902 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385600 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385609 4902 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6728665d-94d1-4dae-904e-17a2c82959be-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385712 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.385725 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkkbm\" (UniqueName: \"kubernetes.io/projected/6728665d-94d1-4dae-904e-17a2c82959be-kube-api-access-xkkbm\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.390082 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts" (OuterVolumeSpecName: "scripts") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.400810 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr" (OuterVolumeSpecName: "kube-api-access-9zcwr") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "kube-api-access-9zcwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.404207 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.409323 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data" (OuterVolumeSpecName: "config-data") pod "6728665d-94d1-4dae-904e-17a2c82959be" (UID: "6728665d-94d1-4dae-904e-17a2c82959be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.433789 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.435675 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.479045 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data" (OuterVolumeSpecName: "config-data") pod "9e6b2ee2-3aac-4805-afc2-72632b4b82ab" (UID: "9e6b2ee2-3aac-4805-afc2-72632b4b82ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.490721 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491012 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zcwr\" (UniqueName: \"kubernetes.io/projected/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-kube-api-access-9zcwr\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491086 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491141 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6b2ee2-3aac-4805-afc2-72632b4b82ab-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491210 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491268 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.491329 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6728665d-94d1-4dae-904e-17a2c82959be-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.515127 4902 operation_generator.go:917] UnmountDevice succeeded for volume 
"local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.601230 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.746046 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6728665d-94d1-4dae-904e-17a2c82959be","Type":"ContainerDied","Data":"76842f5ef2e6c4d103ee7c43329028506e8f7dfd74252bcff0d89474f7b2083d"} Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.746103 4902 scope.go:117] "RemoveContainer" containerID="b9c16cac248c0f08e30858bb10969b3a47486a5dd35987033e74e61223c0ea18" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.746231 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.749931 4902 generic.go:334] "Generic (PLEG): container finished" podID="082d182d-ad5d-4d70-ab12-bac95950cc73" containerID="f989e94db4d2c2cba09db820d4f65afbabc25c5810021646cdafbe57c6a7c61c" exitCode=0 Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.749990 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sr89q" event={"ID":"082d182d-ad5d-4d70-ab12-bac95950cc73","Type":"ContainerDied","Data":"f989e94db4d2c2cba09db820d4f65afbabc25c5810021646cdafbe57c6a7c61c"} Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.764855 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9e6b2ee2-3aac-4805-afc2-72632b4b82ab","Type":"ContainerDied","Data":"1a142c6f6a3d3cb88e92f474af1279e7ae0c01c5992cb078e654b39e8de5a63c"} Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.764946 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.784550 4902 scope.go:117] "RemoveContainer" containerID="bf3fb75f250cc342eec18e2f28caacec1d7472e4d5f57c68c053af5acaebc259" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.799619 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.807600 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.817400 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-786fb786b8-w8tbv"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.825445 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.837823 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.848325 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: E1202 14:36:26.848843 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.848861 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: E1202 14:36:26.848879 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.848886 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: E1202 14:36:26.848894 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.848900 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: E1202 14:36:26.848913 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.848919 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.849909 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.849960 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.849979 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-log" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.849991 4902 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6728665d-94d1-4dae-904e-17a2c82959be" containerName="glance-httpd" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.859723 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.859833 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.863866 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-64b6w" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.864183 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.864308 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.864481 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.870165 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.871642 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.874334 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.874530 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.899504 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:26 crc kubenswrapper[4902]: I1202 14:36:26.953119 4902 scope.go:117] "RemoveContainer" containerID="e6cf738cabd31ebf93d40aea121eed14157de70783a31b6bcd8f0ea1c3865380" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030268 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030380 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030409 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030452 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030484 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcgtr\" (UniqueName: \"kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030589 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030611 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030647 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030671 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030724 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030745 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l28m9\" (UniqueName: \"kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030767 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030822 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030855 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030893 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.030970 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.041783 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.130755 4902 scope.go:117] "RemoveContainer" containerID="cd543239bb5ae8531db62adad1d8d8f0c61a914abc1d451008d1786b18637cff" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132198 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132226 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132251 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132269 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132293 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132310 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l28m9\" (UniqueName: \"kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132332 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132358 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132375 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132397 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132448 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132479 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132516 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132532 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132551 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.132584 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcgtr\" (UniqueName: \"kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.133112 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.134524 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.135255 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.141167 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.142084 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.145980 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.149538 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6728665d-94d1-4dae-904e-17a2c82959be" path="/var/lib/kubelet/pods/6728665d-94d1-4dae-904e-17a2c82959be/volumes" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.160231 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.160237 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.167872 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.169657 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e6b2ee2-3aac-4805-afc2-72632b4b82ab" path="/var/lib/kubelet/pods/9e6b2ee2-3aac-4805-afc2-72632b4b82ab/volumes" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.171037 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.182546 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.191792 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l28m9\" (UniqueName: \"kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.200415 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.204207 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.205240 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcgtr\" (UniqueName: \"kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " 
pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.219294 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.298817 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.300110 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.337391 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.415696 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.415737 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.438970 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.449113 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.451398 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.528955 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.787679 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-786fb786b8-w8tbv" event={"ID":"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3","Type":"ContainerStarted","Data":"0e4fb46060b2dbf28ef05e849becb57c0593b5a58efdecd8f671ff6c6ca2da52"} Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.788051 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-786fb786b8-w8tbv" event={"ID":"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3","Type":"ContainerStarted","Data":"d4938a4707351023053ea1b9974d64859d18d0afda0e56f1cc8f8f751d3437a1"} Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.788732 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.799918 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.835757 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.917001 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.975620 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:36:27 crc kubenswrapper[4902]: I1202 14:36:27.975923 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="dnsmasq-dns" containerID="cri-o://41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655" gracePeriod=10 Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.174466 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:36:28 crc kubenswrapper[4902]: E1202 14:36:28.335986 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode30c107c_9639_41d6_a4de_283d44a0c7de.slice/crio-41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode30c107c_9639_41d6_a4de_283d44a0c7de.slice/crio-conmon-41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.354132 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.452870 4902 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.542535 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579008 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579192 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579298 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9jvk\" (UniqueName: \"kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579504 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579550 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.579686 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle\") pod \"082d182d-ad5d-4d70-ab12-bac95950cc73\" (UID: \"082d182d-ad5d-4d70-ab12-bac95950cc73\") " Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.643745 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk" (OuterVolumeSpecName: "kube-api-access-f9jvk") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "kube-api-access-f9jvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.662833 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.682796 4902 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.682842 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9jvk\" (UniqueName: \"kubernetes.io/projected/082d182d-ad5d-4d70-ab12-bac95950cc73-kube-api-access-f9jvk\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.707330 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts" (OuterVolumeSpecName: "scripts") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.745414 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.769584 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.769726 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.786840 4902 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.786875 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.786884 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.840736 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerStarted","Data":"fcedf88a9e1c902761c47499b48925d6f380d576584c3d86b975d94cd7f9cfa5"} Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.892132 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-786fb786b8-w8tbv" event={"ID":"6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3","Type":"ContainerStarted","Data":"9d2f49e4a49667fc84138e67c2edd680df432f8a6ace5422682f5389e2435b2f"} Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.893249 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.893278 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-786fb786b8-w8tbv" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.944390 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data" (OuterVolumeSpecName: "config-data") pod "082d182d-ad5d-4d70-ab12-bac95950cc73" (UID: "082d182d-ad5d-4d70-ab12-bac95950cc73"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.954098 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-786fb786b8-w8tbv" podStartSLOduration=3.954081344 podStartE2EDuration="3.954081344s" podCreationTimestamp="2025-12-02 14:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:28.951372818 +0000 UTC m=+1220.142681527" watchObservedRunningTime="2025-12-02 14:36:28.954081344 +0000 UTC m=+1220.145390053" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.993691 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/082d182d-ad5d-4d70-ab12-bac95950cc73-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.995265 4902 generic.go:334] "Generic (PLEG): container finished" podID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerID="41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655" exitCode=0 Dec 02 14:36:28 crc kubenswrapper[4902]: I1202 14:36:28.995323 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" event={"ID":"e30c107c-9639-41d6-a4de-283d44a0c7de","Type":"ContainerDied","Data":"41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655"} Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.009849 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-867694b54b-2t5p7"] Dec 02 14:36:29 crc kubenswrapper[4902]: E1202 14:36:29.010239 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082d182d-ad5d-4d70-ab12-bac95950cc73" containerName="keystone-bootstrap" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.010250 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="082d182d-ad5d-4d70-ab12-bac95950cc73" containerName="keystone-bootstrap" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.010441 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="082d182d-ad5d-4d70-ab12-bac95950cc73" containerName="keystone-bootstrap" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.011044 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-867694b54b-2t5p7"] Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.011116 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.015818 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sr89q" event={"ID":"082d182d-ad5d-4d70-ab12-bac95950cc73","Type":"ContainerDied","Data":"f8c26fc2837ce2bb2ec7399ba6c471d92599f6bef334de27cac20422282997fd"} Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.015847 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c26fc2837ce2bb2ec7399ba6c471d92599f6bef334de27cac20422282997fd" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.015896 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sr89q" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.035015 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.035869 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.063601 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerStarted","Data":"0399ff36acccc9ccbdf08028c424ba627fd90eeba2bfd0280a631d3e80505d55"} Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.073815 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098156 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-config-data\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098330 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-scripts\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098394 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-internal-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098429 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-public-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098489 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-combined-ca-bundle\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098521 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-fernet-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098554 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqsfj\" (UniqueName: 
\"kubernetes.io/projected/1a591864-bb12-4847-ba0a-567637fbdaa9-kube-api-access-bqsfj\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.098596 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-credential-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199518 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199576 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199707 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199791 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl8ql\" (UniqueName: \"kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199859 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.199876 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb\") pod \"e30c107c-9639-41d6-a4de-283d44a0c7de\" (UID: \"e30c107c-9639-41d6-a4de-283d44a0c7de\") " Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201019 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-internal-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201060 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-public-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 
14:36:29.201107 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-combined-ca-bundle\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201134 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-fernet-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201162 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqsfj\" (UniqueName: \"kubernetes.io/projected/1a591864-bb12-4847-ba0a-567637fbdaa9-kube-api-access-bqsfj\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201189 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-credential-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201284 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-config-data\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.201343 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-scripts\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.213504 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.215532 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql" (OuterVolumeSpecName: "kube-api-access-cl8ql") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "kube-api-access-cl8ql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.215814 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-internal-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.216100 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-fernet-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.216174 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-credential-keys\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.220075 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-public-tls-certs\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.221357 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-combined-ca-bundle\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.221395 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-config-data\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.241148 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqsfj\" (UniqueName: \"kubernetes.io/projected/1a591864-bb12-4847-ba0a-567637fbdaa9-kube-api-access-bqsfj\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.246472 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a591864-bb12-4847-ba0a-567637fbdaa9-scripts\") pod \"keystone-867694b54b-2t5p7\" (UID: \"1a591864-bb12-4847-ba0a-567637fbdaa9\") " pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.305393 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl8ql\" (UniqueName: \"kubernetes.io/projected/e30c107c-9639-41d6-a4de-283d44a0c7de-kube-api-access-cl8ql\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.312226 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0" 
(OuterVolumeSpecName: "dns-swift-storage-0") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.344304 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.349915 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.364342 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.371446 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.403483 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config" (OuterVolumeSpecName: "config") pod "e30c107c-9639-41d6-a4de-283d44a0c7de" (UID: "e30c107c-9639-41d6-a4de-283d44a0c7de"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.412356 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.412395 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.412407 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.412425 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.412434 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e30c107c-9639-41d6-a4de-283d44a0c7de-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:29 crc kubenswrapper[4902]: I1202 14:36:29.974861 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-867694b54b-2t5p7"] Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.104114 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerStarted","Data":"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db"} Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.116570 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" event={"ID":"e30c107c-9639-41d6-a4de-283d44a0c7de","Type":"ContainerDied","Data":"aaa1e77e16692125940d457ce4a7f54bca0927cb991dec030def8873e870e889"} Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.116615 4902 scope.go:117] "RemoveContainer" containerID="41cae3113f8c71e2b450bc6ab493125eb96587bf4f0c0bd36c1580fadfe06655" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.116737 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-pprmb" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.142605 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerStarted","Data":"7f9f473ac07c4df89e1bdc2cec14a94204e2ccc14412f2c05235b343422f9e59"} Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.357056 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.372918 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-pprmb"] Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.397261 4902 scope.go:117] "RemoveContainer" containerID="989fbc95ce9a434b18c838996717c1b69cbbd232c02991715a484a5410716285" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.751935 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.752298 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.754534 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.967528 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.969778 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:36:30 crc kubenswrapper[4902]: I1202 14:36:30.979155 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-54c4bbdfbb-v8pjf" podUID="462ead25-02ec-4791-b927-56cf3f26ca39" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.160:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.160:8443: connect: connection refused" Dec 02 14:36:31 crc kubenswrapper[4902]: I1202 14:36:31.118887 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" path="/var/lib/kubelet/pods/e30c107c-9639-41d6-a4de-283d44a0c7de/volumes" Dec 02 14:36:31 crc kubenswrapper[4902]: I1202 14:36:31.166674 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerStarted","Data":"b1932c4ae1b856f2ee864e26a86aec06defff0bd97dcf0b546274265c8f87a61"} Dec 02 14:36:31 crc kubenswrapper[4902]: I1202 14:36:31.176909 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-867694b54b-2t5p7" event={"ID":"1a591864-bb12-4847-ba0a-567637fbdaa9","Type":"ContainerStarted","Data":"90fc12c78005ac150a156d00df098036a765fb08f00eedf9a487481982def9bd"} Dec 02 14:36:31 crc kubenswrapper[4902]: I1202 14:36:31.176966 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-867694b54b-2t5p7" 
event={"ID":"1a591864-bb12-4847-ba0a-567637fbdaa9","Type":"ContainerStarted","Data":"994d6f83c54f0247d0917f1b1d2f5f26ca89a2e189c65b6c8a381b1104438578"} Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.194676 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerStarted","Data":"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075"} Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.195626 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-867694b54b-2t5p7" Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.231454 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-867694b54b-2t5p7" podStartSLOduration=4.231433214 podStartE2EDuration="4.231433214s" podCreationTimestamp="2025-12-02 14:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:32.213519761 +0000 UTC m=+1223.404828470" watchObservedRunningTime="2025-12-02 14:36:32.231433214 +0000 UTC m=+1223.422741923" Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.253470 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.253446613 podStartE2EDuration="6.253446613s" podCreationTimestamp="2025-12-02 14:36:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:32.24765343 +0000 UTC m=+1223.438962139" watchObservedRunningTime="2025-12-02 14:36:32.253446613 +0000 UTC m=+1223.444755322" Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.281919 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.281904534 podStartE2EDuration="6.281904534s" podCreationTimestamp="2025-12-02 14:36:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:32.269626868 +0000 UTC m=+1223.460935587" watchObservedRunningTime="2025-12-02 14:36:32.281904534 +0000 UTC m=+1223.473213243" Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.670073 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.670461 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api" containerID="cri-o://0ee92cbb2b962b3eb4f659056f499805f2f16ecd03907db9f3543bfc00f7fce5" gracePeriod=30 Dec 02 14:36:32 crc kubenswrapper[4902]: I1202 14:36:32.670356 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api-log" containerID="cri-o://206f480261c17ee366d079c8311055bd4dabd360c1483c60b9639af5d0fcbc62" gracePeriod=30 Dec 02 14:36:33 crc kubenswrapper[4902]: I1202 14:36:33.206845 4902 generic.go:334] "Generic (PLEG): container finished" podID="15ef989e-35ba-446e-a272-708181f84676" containerID="206f480261c17ee366d079c8311055bd4dabd360c1483c60b9639af5d0fcbc62" exitCode=143 Dec 02 14:36:33 crc kubenswrapper[4902]: I1202 14:36:33.206881 4902 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerDied","Data":"206f480261c17ee366d079c8311055bd4dabd360c1483c60b9639af5d0fcbc62"}
Dec 02 14:36:34 crc kubenswrapper[4902]: I1202 14:36:34.731958 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:36:34 crc kubenswrapper[4902]: I1202 14:36:34.732377 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.279365 4902 generic.go:334] "Generic (PLEG): container finished" podID="15ef989e-35ba-446e-a272-708181f84676" containerID="0ee92cbb2b962b3eb4f659056f499805f2f16ecd03907db9f3543bfc00f7fce5" exitCode=0
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.279928 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerDied","Data":"0ee92cbb2b962b3eb4f659056f499805f2f16ecd03907db9f3543bfc00f7fce5"}
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.339538 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.339592 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.411513 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.444830 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.448412 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.448457 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.504337 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.581555 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.690371 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.823090 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data\") pod \"15ef989e-35ba-446e-a272-708181f84676\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") "
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.823172 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca\") pod \"15ef989e-35ba-446e-a272-708181f84676\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") "
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.823221 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle\") pod \"15ef989e-35ba-446e-a272-708181f84676\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") "
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.823267 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs\") pod \"15ef989e-35ba-446e-a272-708181f84676\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") "
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.823369 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvfzn\" (UniqueName: \"kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn\") pod \"15ef989e-35ba-446e-a272-708181f84676\" (UID: \"15ef989e-35ba-446e-a272-708181f84676\") "
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.828471 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs" (OuterVolumeSpecName: "logs") pod "15ef989e-35ba-446e-a272-708181f84676" (UID: "15ef989e-35ba-446e-a272-708181f84676"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.834240 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn" (OuterVolumeSpecName: "kube-api-access-jvfzn") pod "15ef989e-35ba-446e-a272-708181f84676" (UID: "15ef989e-35ba-446e-a272-708181f84676"). InnerVolumeSpecName "kube-api-access-jvfzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.864556 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15ef989e-35ba-446e-a272-708181f84676" (UID: "15ef989e-35ba-446e-a272-708181f84676"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.897024 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "15ef989e-35ba-446e-a272-708181f84676" (UID: "15ef989e-35ba-446e-a272-708181f84676"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.900679 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data" (OuterVolumeSpecName: "config-data") pod "15ef989e-35ba-446e-a272-708181f84676" (UID: "15ef989e-35ba-446e-a272-708181f84676"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.927697 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.927735 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15ef989e-35ba-446e-a272-708181f84676-logs\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.927747 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvfzn\" (UniqueName: \"kubernetes.io/projected/15ef989e-35ba-446e-a272-708181f84676-kube-api-access-jvfzn\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.927758 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:37 crc kubenswrapper[4902]: I1202 14:36:37.927767 4902 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/15ef989e-35ba-446e-a272-708181f84676-custom-prometheus-ca\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.291728 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"15ef989e-35ba-446e-a272-708181f84676","Type":"ContainerDied","Data":"33412b5da06aa1e05f0df1831faab11b06392acbf86a87db681ca464c786f37e"}
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.291778 4902 scope.go:117] "RemoveContainer" containerID="0ee92cbb2b962b3eb4f659056f499805f2f16ecd03907db9f3543bfc00f7fce5"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.291893 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.315219 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rgnm2" event={"ID":"52b3cd40-3726-4b52-8e64-8b15f5f02a99","Type":"ContainerStarted","Data":"3c8201f92fd1f7fd470adbe18107a7bf7b6d8b7a231eae1e82cff6b8205915ba"}
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.321862 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerStarted","Data":"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8"}
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.335595 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7n2ss" event={"ID":"9c39a4fe-20e0-458b-a226-d6f5ad9cb846","Type":"ContainerStarted","Data":"3f338b0a4fb1d24b97326ae424a40e677f52b20c69e5b9785983513d6c3d26f8"}
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.335639 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.337599 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.337620 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.337637 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.345597 4902 scope.go:117] "RemoveContainer" containerID="206f480261c17ee366d079c8311055bd4dabd360c1483c60b9639af5d0fcbc62"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.353671 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-rgnm2" podStartSLOduration=3.778909185 podStartE2EDuration="57.353657524s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="2025-12-02 14:35:43.651963446 +0000 UTC m=+1174.843272155" lastFinishedPulling="2025-12-02 14:36:37.226711785 +0000 UTC m=+1228.418020494" observedRunningTime="2025-12-02 14:36:38.343119888 +0000 UTC m=+1229.534428597" watchObservedRunningTime="2025-12-02 14:36:38.353657524 +0000 UTC m=+1229.544966233"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.366646 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7n2ss" podStartSLOduration=4.03974036 podStartE2EDuration="57.366629689s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="2025-12-02 14:35:43.866826398 +0000 UTC m=+1175.058135107" lastFinishedPulling="2025-12-02 14:36:37.193715727 +0000 UTC m=+1228.385024436" observedRunningTime="2025-12-02 14:36:38.356686719 +0000 UTC m=+1229.547995428" watchObservedRunningTime="2025-12-02 14:36:38.366629689 +0000 UTC m=+1229.557938398"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.392142 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"]
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.415713 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"]
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.470636 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"]
Dec 02 14:36:38 crc kubenswrapper[4902]: E1202 14:36:38.471083 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.471098 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api"
Dec 02 14:36:38 crc kubenswrapper[4902]: E1202 14:36:38.471132 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="init"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.471140 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="init"
Dec 02 14:36:38 crc kubenswrapper[4902]: E1202 14:36:38.471164 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api-log"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.471172 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api-log"
Dec 02 14:36:38 crc kubenswrapper[4902]: E1202 14:36:38.471192 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="dnsmasq-dns"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.471199 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="dnsmasq-dns"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.472185 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.472307 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api-log"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.472323 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30c107c-9639-41d6-a4de-283d44a0c7de" containerName="dnsmasq-dns"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.473886 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.479904 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.480265 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.480308 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.510166 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560286 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560365 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-config-data\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560404 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560427 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/479a72ce-7b6b-4da9-8315-236e285a6680-logs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560528 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7txc8\" (UniqueName: \"kubernetes.io/projected/479a72ce-7b6b-4da9-8315-236e285a6680-kube-api-access-7txc8\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560590 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-public-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.560616 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664490 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664549 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-config-data\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664589 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664603 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/479a72ce-7b6b-4da9-8315-236e285a6680-logs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664683 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7txc8\" (UniqueName: \"kubernetes.io/projected/479a72ce-7b6b-4da9-8315-236e285a6680-kube-api-access-7txc8\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664707 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-public-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.664739 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.665986 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/479a72ce-7b6b-4da9-8315-236e285a6680-logs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.672255 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.672328 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.676114 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-public-tls-certs\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.676831 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.682677 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/479a72ce-7b6b-4da9-8315-236e285a6680-config-data\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.682960 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7txc8\" (UniqueName: \"kubernetes.io/projected/479a72ce-7b6b-4da9-8315-236e285a6680-kube-api-access-7txc8\") pod \"watcher-api-0\" (UID: \"479a72ce-7b6b-4da9-8315-236e285a6680\") " pod="openstack/watcher-api-0"
Dec 02 14:36:38 crc kubenswrapper[4902]: I1202 14:36:38.822931 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Dec 02 14:36:39 crc kubenswrapper[4902]: I1202 14:36:39.126588 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ef989e-35ba-446e-a272-708181f84676" path="/var/lib/kubelet/pods/15ef989e-35ba-446e-a272-708181f84676/volumes"
Dec 02 14:36:39 crc kubenswrapper[4902]: I1202 14:36:39.415928 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Dec 02 14:36:39 crc kubenswrapper[4902]: W1202 14:36:39.422548 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod479a72ce_7b6b_4da9_8315_236e285a6680.slice/crio-82da6289424f58f13ffd45399bbf208106fce3b128b0c071b5cbd6f03184c31b WatchSource:0}: Error finding container 82da6289424f58f13ffd45399bbf208106fce3b128b0c071b5cbd6f03184c31b: Status 404 returned error can't find the container with id 82da6289424f58f13ffd45399bbf208106fce3b128b0c071b5cbd6f03184c31b
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.385826 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"479a72ce-7b6b-4da9-8315-236e285a6680","Type":"ContainerStarted","Data":"025ec5826ff9ae986f0d665f4827cf28424757ab3366ae58e2c9f46657f40e8a"}
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386261 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386278 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"479a72ce-7b6b-4da9-8315-236e285a6680","Type":"ContainerStarted","Data":"aebb46b8cae88a6257e3764d6f9b140394f40c2763839332a0a17286ef123a74"}
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386293 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"479a72ce-7b6b-4da9-8315-236e285a6680","Type":"ContainerStarted","Data":"82da6289424f58f13ffd45399bbf208106fce3b128b0c071b5cbd6f03184c31b"}
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.385908 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386313 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386671 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.386688 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.409536 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.409518566 podStartE2EDuration="2.409518566s" podCreationTimestamp="2025-12-02 14:36:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:40.40679873 +0000 UTC m=+1231.598107449" watchObservedRunningTime="2025-12-02 14:36:40.409518566 +0000 UTC m=+1231.600827275"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.752308 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused"
Dec 02 14:36:40 crc kubenswrapper[4902]: I1202 14:36:40.964826 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-54c4bbdfbb-v8pjf" podUID="462ead25-02ec-4791-b927-56cf3f26ca39" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.160:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.160:8443: connect: connection refused"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.720507 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.721003 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.737020 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.737133 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.742274 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 02 14:36:41 crc kubenswrapper[4902]: I1202 14:36:41.762593 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 02 14:36:42 crc kubenswrapper[4902]: I1202 14:36:42.417603 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:36:42 crc kubenswrapper[4902]: I1202 14:36:42.417712 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="15ef989e-35ba-446e-a272-708181f84676" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.165:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:36:43 crc kubenswrapper[4902]: I1202 14:36:43.435968 4902 generic.go:334] "Generic (PLEG): container finished" podID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" containerID="3f338b0a4fb1d24b97326ae424a40e677f52b20c69e5b9785983513d6c3d26f8" exitCode=0
Dec 02 14:36:43 crc kubenswrapper[4902]: I1202 14:36:43.436100 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7n2ss" event={"ID":"9c39a4fe-20e0-458b-a226-d6f5ad9cb846","Type":"ContainerDied","Data":"3f338b0a4fb1d24b97326ae424a40e677f52b20c69e5b9785983513d6c3d26f8"}
Dec 02 14:36:43 crc kubenswrapper[4902]: I1202 14:36:43.758121 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0"
Dec 02 14:36:43 crc kubenswrapper[4902]: I1202 14:36:43.826750 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0"
Dec 02 14:36:45 crc kubenswrapper[4902]: I1202 14:36:45.469175 4902 generic.go:334] "Generic (PLEG): container finished" podID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" containerID="3c8201f92fd1f7fd470adbe18107a7bf7b6d8b7a231eae1e82cff6b8205915ba" exitCode=0
Dec 02 14:36:45 crc kubenswrapper[4902]: I1202 14:36:45.469343 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rgnm2" event={"ID":"52b3cd40-3726-4b52-8e64-8b15f5f02a99","Type":"ContainerDied","Data":"3c8201f92fd1f7fd470adbe18107a7bf7b6d8b7a231eae1e82cff6b8205915ba"}
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.033882 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7n2ss"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.052032 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rgnm2"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.084359 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle\") pod \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.084439 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data\") pod \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.084620 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5qgb\" (UniqueName: \"kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb\") pod \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\" (UID: \"9c39a4fe-20e0-458b-a226-d6f5ad9cb846\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.092711 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9c39a4fe-20e0-458b-a226-d6f5ad9cb846" (UID: "9c39a4fe-20e0-458b-a226-d6f5ad9cb846"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.102436 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb" (OuterVolumeSpecName: "kube-api-access-r5qgb") pod "9c39a4fe-20e0-458b-a226-d6f5ad9cb846" (UID: "9c39a4fe-20e0-458b-a226-d6f5ad9cb846"). InnerVolumeSpecName "kube-api-access-r5qgb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.143819 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c39a4fe-20e0-458b-a226-d6f5ad9cb846" (UID: "9c39a4fe-20e0-458b-a226-d6f5ad9cb846"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.187054 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.187711 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.187762 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.187794 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.188120 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rsp\" (UniqueName: \"kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.188193 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data\") pod \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\" (UID: \"52b3cd40-3726-4b52-8e64-8b15f5f02a99\") "
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.188516 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.190714 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.191126 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5qgb\" (UniqueName: \"kubernetes.io/projected/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-kube-api-access-r5qgb\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.191205 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.191262 4902 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.191651 4902 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9c39a4fe-20e0-458b-a226-d6f5ad9cb846-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.191716 4902 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/52b3cd40-3726-4b52-8e64-8b15f5f02a99-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.193768 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp" (OuterVolumeSpecName: "kube-api-access-w9rsp") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "kube-api-access-w9rsp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.193876 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts" (OuterVolumeSpecName: "scripts") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.222751 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.248946 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data" (OuterVolumeSpecName: "config-data") pod "52b3cd40-3726-4b52-8e64-8b15f5f02a99" (UID: "52b3cd40-3726-4b52-8e64-8b15f5f02a99"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.294043 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rsp\" (UniqueName: \"kubernetes.io/projected/52b3cd40-3726-4b52-8e64-8b15f5f02a99-kube-api-access-w9rsp\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.294248 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.294462 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.294657 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52b3cd40-3726-4b52-8e64-8b15f5f02a99-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.497718 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rgnm2"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.497764 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rgnm2" event={"ID":"52b3cd40-3726-4b52-8e64-8b15f5f02a99","Type":"ContainerDied","Data":"5e30f8ba1c6660dbd143db2d874a9e1f9d253a0a0e857a141a652c049535cd49"}
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.497925 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e30f8ba1c6660dbd143db2d874a9e1f9d253a0a0e857a141a652c049535cd49"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.514906 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-central-agent" containerID="cri-o://879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba" gracePeriod=30
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.517146 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerStarted","Data":"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8"}
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.517381 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.518056 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="sg-core" containerID="cri-o://9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8" gracePeriod=30
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.518132 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="proxy-httpd" containerID="cri-o://6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8" gracePeriod=30
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.518157 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-notification-agent" containerID="cri-o://013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820" gracePeriod=30
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.527104 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7n2ss"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.530393 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7n2ss" event={"ID":"9c39a4fe-20e0-458b-a226-d6f5ad9cb846","Type":"ContainerDied","Data":"ed49014e35c9637e1292929765f60308de670f1a4fbee43f35b8baf912a51c6a"}
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.530555 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed49014e35c9637e1292929765f60308de670f1a4fbee43f35b8baf912a51c6a"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.822920 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.529261307 podStartE2EDuration="1m6.822905925s" podCreationTimestamp="2025-12-02 14:35:41 +0000 UTC" firstStartedPulling="2025-12-02 14:35:43.799057163 +0000 UTC m=+1174.990365872" lastFinishedPulling="2025-12-02 14:36:47.092701781 +0000 UTC m=+1238.284010490" observedRunningTime="2025-12-02 14:36:47.564128988 +0000 UTC m=+1238.755437747" watchObservedRunningTime="2025-12-02 14:36:47.822905925 +0000 UTC m=+1239.014214634"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.823374 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 14:36:47 crc kubenswrapper[4902]: E1202 14:36:47.824061 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" containerName="barbican-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.824080 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" containerName="barbican-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: E1202 14:36:47.824094 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" containerName="cinder-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.824101 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" containerName="cinder-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.824285 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" containerName="barbican-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.824318 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" containerName="cinder-db-sync"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.831705 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.839393 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.839440 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.839754 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.839846 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-n6scg"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.860083 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.922030 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-75bd8f464d-cnfhg"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.927932 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.928013 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwshr\" (UniqueName: \"kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.928055 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.928071 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.928112 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.928134 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.954270 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"]
Dec 02 14:36:47 crc kubenswrapper[4902]: I1202 14:36:47.955901 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.029948 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.029997 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030046 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030076 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030107 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030204 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030225 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030263 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030285 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030306 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwshr\" (UniqueName: \"kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030378 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd9t8\" (UniqueName: \"kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.030493 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.034732 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.038525 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.040907 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.062969 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.064552 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwshr\" (UniqueName: \"kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.066757 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data\") pod \"cinder-scheduler-0\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") " pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.132836 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.132899 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.132923 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.132948 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.132993 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd9t8\" (UniqueName: \"kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.133062 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.133992 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.144089 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.144647 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.145287 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.149448 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.168149 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd9t8\" (UniqueName: \"kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8\") pod \"dnsmasq-dns-b895b5785-b4lz5\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.168204 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.171487 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.175142 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.176028 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.179907 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.301918 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b895b5785-b4lz5"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341310 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341354 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341380 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cx8v\" (UniqueName: \"kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341430 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341470 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341535 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.341553 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.342548 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.344182 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.348899 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.349160 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7dp8d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.349755 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.371762 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5d54dc89f-fxxfb"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.373603 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d54dc89f-fxxfb"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.375497 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.409812 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.430092 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d54dc89f-fxxfb"]
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.452922 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-logs\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453008 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453048 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453077 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453108 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-combined-ca-bundle\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453139 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gdp8\" (UniqueName: \"kubernetes.io/projected/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-kube-api-access-5gdp8\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453167 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453187 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453216 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cx8v\" (UniqueName: \"kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453493 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.462820 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.464123 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.453553 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.468347 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data-custom\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"
Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.469042 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom\") pod \"cinder-api-0\" (UID:
\"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.469087 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.482187 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.482825 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.487659 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"] Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.500986 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cx8v\" (UniqueName: \"kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v\") pod \"cinder-api-0\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.517632 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.519307 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.560880 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.573326 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603525 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gdp8\" (UniqueName: \"kubernetes.io/projected/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-kube-api-access-5gdp8\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603734 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-combined-ca-bundle\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603754 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bea5327c-b120-4e9b-876c-94e46621dcad-logs\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603780 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data-custom\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603800 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data-custom\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603818 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603837 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zlp6\" (UniqueName: \"kubernetes.io/projected/bea5327c-b120-4e9b-876c-94e46621dcad-kube-api-access-4zlp6\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603869 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-logs\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 
crc kubenswrapper[4902]: I1202 14:36:48.603904 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.603936 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-combined-ca-bundle\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.604769 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-logs\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.615195 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"] Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.617177 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.620930 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621011 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-combined-ca-bundle\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621710 4902 generic.go:334] "Generic (PLEG): container finished" podID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerID="6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8" exitCode=0 Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621728 4902 generic.go:334] "Generic (PLEG): container finished" podID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerID="9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8" exitCode=2 Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621736 4902 generic.go:334] "Generic (PLEG): container finished" podID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerID="879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba" exitCode=0 Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621754 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerDied","Data":"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8"} Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.621777 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerDied","Data":"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8"} Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 
14:36:48.621786 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerDied","Data":"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba"} Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.624739 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.628422 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-config-data-custom\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.694113 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gdp8\" (UniqueName: \"kubernetes.io/projected/1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64-kube-api-access-5gdp8\") pod \"barbican-keystone-listener-7cb4b5cd86-6nl4d\" (UID: \"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64\") " pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710677 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710730 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-combined-ca-bundle\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710751 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bea5327c-b120-4e9b-876c-94e46621dcad-logs\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710780 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710796 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data-custom\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 
14:36:48.710820 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710840 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zlp6\" (UniqueName: \"kubernetes.io/projected/bea5327c-b120-4e9b-876c-94e46621dcad-kube-api-access-4zlp6\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710864 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710888 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710929 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.710969 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcsrw\" (UniqueName: \"kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.726423 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bea5327c-b120-4e9b-876c-94e46621dcad-logs\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.732366 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data-custom\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.732593 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-combined-ca-bundle\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc 
kubenswrapper[4902]: I1202 14:36:48.737548 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea5327c-b120-4e9b-876c-94e46621dcad-config-data\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.760219 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zlp6\" (UniqueName: \"kubernetes.io/projected/bea5327c-b120-4e9b-876c-94e46621dcad-kube-api-access-4zlp6\") pod \"barbican-worker-5d54dc89f-fxxfb\" (UID: \"bea5327c-b120-4e9b-876c-94e46621dcad\") " pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.795638 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"] Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815035 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815076 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815121 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815144 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815182 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815206 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815222 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktp28\" (UniqueName: 
\"kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815241 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815283 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815323 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.815342 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcsrw\" (UniqueName: \"kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.816305 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.816325 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.816846 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.816955 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.817365 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" 
(UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.837037 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.859286 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.875523 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcsrw\" (UniqueName: \"kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw\") pod \"dnsmasq-dns-5c9776ccc5-vtdqn\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.906958 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.921098 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.921146 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktp28\" (UniqueName: \"kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.921229 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.921278 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.921331 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.923534 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.929213 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.934699 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.939031 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.941994 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktp28\" (UniqueName: \"kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28\") pod \"barbican-api-54b4cc97b8-h6x6n\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") " pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.976221 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:48 crc kubenswrapper[4902]: I1202 14:36:48.977580 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.001681 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.009003 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5d54dc89f-fxxfb" Dec 02 14:36:49 crc kubenswrapper[4902]: W1202 14:36:49.255071 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62f7e26a_463d_4c12_a7e4_1421493143a9.slice/crio-23d871b96113307630a395ba057d42c1a940f8b021ee71677bcb4186b2867470 WatchSource:0}: Error finding container 23d871b96113307630a395ba057d42c1a940f8b021ee71677bcb4186b2867470: Status 404 returned error can't find the container with id 23d871b96113307630a395ba057d42c1a940f8b021ee71677bcb4186b2867470 Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.270054 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"] Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.592504 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.639874 4902 generic.go:334] "Generic (PLEG): container finished" podID="62f7e26a-463d-4c12-a7e4-1421493143a9" containerID="1a8b47b175e33a9c5a14c0d27fe906170d4fb3ef41e99e7ff943b3971d01df2b" exitCode=0 Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.640052 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b895b5785-b4lz5" event={"ID":"62f7e26a-463d-4c12-a7e4-1421493143a9","Type":"ContainerDied","Data":"1a8b47b175e33a9c5a14c0d27fe906170d4fb3ef41e99e7ff943b3971d01df2b"} Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.640177 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b895b5785-b4lz5" event={"ID":"62f7e26a-463d-4c12-a7e4-1421493143a9","Type":"ContainerStarted","Data":"23d871b96113307630a395ba057d42c1a940f8b021ee71677bcb4186b2867470"} Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.652128 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerStarted","Data":"1e00e0ab5d613bc78bcd14b882f380558215256845620f269fc98337e56aa1de"} Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.671474 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.767498 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.816107 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d"] Dec 02 14:36:49 crc kubenswrapper[4902]: I1202 14:36:49.847452 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"] Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.042724 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d54dc89f-fxxfb"] Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.333176 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b895b5785-b4lz5" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.477895 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gd9t8\" (UniqueName: \"kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.478493 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.478605 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.478632 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.478658 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.478731 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config\") pod \"62f7e26a-463d-4c12-a7e4-1421493143a9\" (UID: \"62f7e26a-463d-4c12-a7e4-1421493143a9\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.482011 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7bffdc6c65-srhkc" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.485058 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8" (OuterVolumeSpecName: "kube-api-access-gd9t8") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "kube-api-access-gd9t8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.503227 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.513988 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.534442 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.535952 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.536044 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config" (OuterVolumeSpecName: "config") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.539243 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.542899 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "62f7e26a-463d-4c12-a7e4-1421493143a9" (UID: "62f7e26a-463d-4c12-a7e4-1421493143a9"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.578568 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"] Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.578983 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-75bd8f464d-cnfhg" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-api" containerID="cri-o://e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61" gracePeriod=30 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.579428 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-75bd8f464d-cnfhg" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-httpd" containerID="cri-o://8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc" gracePeriod=30 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588338 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gd9t8\" (UniqueName: \"kubernetes.io/projected/62f7e26a-463d-4c12-a7e4-1421493143a9-kube-api-access-gd9t8\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588457 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588526 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588595 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588665 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.588717 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62f7e26a-463d-4c12-a7e4-1421493143a9-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.698695 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.698914 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699311 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: 
\"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699213 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699453 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699704 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699781 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-886wf\" (UniqueName: \"kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.699822 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts\") pod \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\" (UID: \"97eb051a-3187-447f-8bf6-71ae3d8f65d7\") " Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.700678 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.711183 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.714729 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf" (OuterVolumeSpecName: "kube-api-access-886wf") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "kube-api-access-886wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.717096 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts" (OuterVolumeSpecName: "scripts") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.731606 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b895b5785-b4lz5" event={"ID":"62f7e26a-463d-4c12-a7e4-1421493143a9","Type":"ContainerDied","Data":"23d871b96113307630a395ba057d42c1a940f8b021ee71677bcb4186b2867470"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.731651 4902 scope.go:117] "RemoveContainer" containerID="1a8b47b175e33a9c5a14c0d27fe906170d4fb3ef41e99e7ff943b3971d01df2b" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.731756 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b895b5785-b4lz5" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.767423 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.767539 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.774379 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924"} pod="openstack/horizon-85bf5878d8-bn7cr" containerMessage="Container horizon failed startup probe, will be restarted" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.774740 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" containerID="cri-o://462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" gracePeriod=30 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.802068 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97eb051a-3187-447f-8bf6-71ae3d8f65d7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.802315 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-886wf\" (UniqueName: \"kubernetes.io/projected/97eb051a-3187-447f-8bf6-71ae3d8f65d7-kube-api-access-886wf\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.802426 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.803235 4902 generic.go:334] "Generic (PLEG): container finished" podID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerID="013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820" exitCode=0 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.803347 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerDied","Data":"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.803423 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"97eb051a-3187-447f-8bf6-71ae3d8f65d7","Type":"ContainerDied","Data":"5a2c3ba3e052ad7136111c6e6c7a22e4b076d6e8a151313fb5b7f477f25ae2c6"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.803483 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.823765 4902 scope.go:117] "RemoveContainer" containerID="6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.870634 4902 generic.go:334] "Generic (PLEG): container finished" podID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerID="2236e2125cfd0dd2462179437852250a37fae12330f0da057403262ac6598fdc" exitCode=137 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.870662 4902 generic.go:334] "Generic (PLEG): container finished" podID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerID="bd75a7da1a4e580e605781dffd69856bad4b9b2e0d9ea492f0939f90a56b60c1" exitCode=137 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.870670 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerDied","Data":"2236e2125cfd0dd2462179437852250a37fae12330f0da057403262ac6598fdc"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.870723 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerDied","Data":"bd75a7da1a4e580e605781dffd69856bad4b9b2e0d9ea492f0939f90a56b60c1"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.874251 4902 generic.go:334] "Generic (PLEG): container finished" podID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerID="4515363a23d7504efb117817b199725fa08dd4d30ceaf6dea537d46045254e29" exitCode=0 Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.874304 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" event={"ID":"bddd6267-1a8a-4783-a674-4037dc5c52da","Type":"ContainerDied","Data":"4515363a23d7504efb117817b199725fa08dd4d30ceaf6dea537d46045254e29"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.874328 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" event={"ID":"bddd6267-1a8a-4783-a674-4037dc5c52da","Type":"ContainerStarted","Data":"b7d5dd633050bcd2bfbac0acb36094d4b7b7de00ad465c153c34adcdd4a2e011"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.880444 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"] Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.885033 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerStarted","Data":"5c0d97249fba417949e780668d1151adb2be86f5fd588c1b11c262b7ceab41fa"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.892505 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d54dc89f-fxxfb" event={"ID":"bea5327c-b120-4e9b-876c-94e46621dcad","Type":"ContainerStarted","Data":"7967f55e5a1e1acaf0d9b89bbc87666121f152c26431ae47f7efd0c6b0b0c51d"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.894212 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml" (OuterVolumeSpecName: 
"sg-core-conf-yaml") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.894495 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" event={"ID":"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64","Type":"ContainerStarted","Data":"f0a50850701a96e49cc59558ed0310e17501e26b9b78f9b8398f8090350ff810"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.908969 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data" (OuterVolumeSpecName: "config-data") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.911359 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerStarted","Data":"9330f452e963c1ddaf725f531a15ebfa67af0e359e755b9f05c50d6806dbb737"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.911427 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerStarted","Data":"aeb136aeaa58efb731b307b58754c781a01d030af4218ec3cbd11bd999a798c1"} Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.923416 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.924703 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.926500 4902 scope.go:117] "RemoveContainer" containerID="9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8" Dec 02 14:36:50 crc kubenswrapper[4902]: I1202 14:36:50.931899 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b895b5785-b4lz5"] Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.083705 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97eb051a-3187-447f-8bf6-71ae3d8f65d7" (UID: "97eb051a-3187-447f-8bf6-71ae3d8f65d7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.131045 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97eb051a-3187-447f-8bf6-71ae3d8f65d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.147851 4902 scope.go:117] "RemoveContainer" containerID="013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.157036 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f7e26a-463d-4c12-a7e4-1421493143a9" path="/var/lib/kubelet/pods/62f7e26a-463d-4c12-a7e4-1421493143a9/volumes" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.268730 4902 scope.go:117] "RemoveContainer" containerID="879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.289313 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.364400 4902 scope.go:117] "RemoveContainer" containerID="6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8" Dec 02 14:36:51 crc kubenswrapper[4902]: E1202 14:36:51.364901 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8\": container with ID starting with 6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8 not found: ID does not exist" containerID="6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.364924 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8"} err="failed to get container status \"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8\": rpc error: code = NotFound desc = could not find container \"6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8\": container with ID starting with 6ad0be2a6011fab01a733c371f6b3550cb26144e147069a3abf47cc34959d3a8 not found: ID does not exist" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.364944 4902 scope.go:117] "RemoveContainer" containerID="9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8" Dec 02 14:36:51 crc kubenswrapper[4902]: E1202 14:36:51.365255 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8\": container with ID starting with 9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8 not found: ID does not exist" containerID="9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.365275 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8"} err="failed to get container status \"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8\": rpc error: code = NotFound desc = could not find container \"9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8\": container with ID starting with 
9c2dd45401f43a833b75f162e6e5b5c3596b524aeab9e1347a8075b1bd83e5c8 not found: ID does not exist" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.365288 4902 scope.go:117] "RemoveContainer" containerID="013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820" Dec 02 14:36:51 crc kubenswrapper[4902]: E1202 14:36:51.365983 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820\": container with ID starting with 013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820 not found: ID does not exist" containerID="013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.366033 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820"} err="failed to get container status \"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820\": rpc error: code = NotFound desc = could not find container \"013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820\": container with ID starting with 013164b6696fd7149ad61d243d570d02b3d5bba4c3f2455b75da7bdb542d8820 not found: ID does not exist" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.366053 4902 scope.go:117] "RemoveContainer" containerID="879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba" Dec 02 14:36:51 crc kubenswrapper[4902]: E1202 14:36:51.367079 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba\": container with ID starting with 879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba not found: ID does not exist" containerID="879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.367102 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba"} err="failed to get container status \"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba\": rpc error: code = NotFound desc = could not find container \"879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba\": container with ID starting with 879e345ac3d76cfa21c5fabe88893c7255e3e883ab581ed742ae9e95511719ba not found: ID does not exist" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.445326 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs\") pod \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.445729 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key\") pod \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.445752 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data\") pod 
\"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.445814 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts\") pod \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.445886 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sctsh\" (UniqueName: \"kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh\") pod \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\" (UID: \"705850b6-c4e0-43ae-a1b2-1a8c536b97bd\") " Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.447154 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs" (OuterVolumeSpecName: "logs") pod "705850b6-c4e0-43ae-a1b2-1a8c536b97bd" (UID: "705850b6-c4e0-43ae-a1b2-1a8c536b97bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.451832 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "705850b6-c4e0-43ae-a1b2-1a8c536b97bd" (UID: "705850b6-c4e0-43ae-a1b2-1a8c536b97bd"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.452489 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh" (OuterVolumeSpecName: "kube-api-access-sctsh") pod "705850b6-c4e0-43ae-a1b2-1a8c536b97bd" (UID: "705850b6-c4e0-43ae-a1b2-1a8c536b97bd"). InnerVolumeSpecName "kube-api-access-sctsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.479541 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data" (OuterVolumeSpecName: "config-data") pod "705850b6-c4e0-43ae-a1b2-1a8c536b97bd" (UID: "705850b6-c4e0-43ae-a1b2-1a8c536b97bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.482237 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts" (OuterVolumeSpecName: "scripts") pod "705850b6-c4e0-43ae-a1b2-1a8c536b97bd" (UID: "705850b6-c4e0-43ae-a1b2-1a8c536b97bd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.548216 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.548249 4902 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.548262 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.548270 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.548278 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sctsh\" (UniqueName: \"kubernetes.io/projected/705850b6-c4e0-43ae-a1b2-1a8c536b97bd-kube-api-access-sctsh\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.939747 4902 generic.go:334] "Generic (PLEG): container finished" podID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerID="31fa4fd7dccceddb5792c84cf234eebaa6816a9d8673b80e34cbc681a2ef4189" exitCode=137 Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.940063 4902 generic.go:334] "Generic (PLEG): container finished" podID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerID="9eedcce42209e0a9f4b55b6908860a717e63479586c12d555412f4a2fa66db48" exitCode=137 Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.940116 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerDied","Data":"31fa4fd7dccceddb5792c84cf234eebaa6816a9d8673b80e34cbc681a2ef4189"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.940140 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerDied","Data":"9eedcce42209e0a9f4b55b6908860a717e63479586c12d555412f4a2fa66db48"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.942657 4902 generic.go:334] "Generic (PLEG): container finished" podID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerID="8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc" exitCode=0 Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.942696 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerDied","Data":"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.949627 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdbd8859c-g8rmc" event={"ID":"705850b6-c4e0-43ae-a1b2-1a8c536b97bd","Type":"ContainerDied","Data":"ccf94bc9240894c8012923fb286496cde7de96ee0685062f838780c6d8cb97e8"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.949664 4902 scope.go:117] "RemoveContainer" 
containerID="2236e2125cfd0dd2462179437852250a37fae12330f0da057403262ac6598fdc" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.949779 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdbd8859c-g8rmc" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.960020 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" event={"ID":"bddd6267-1a8a-4783-a674-4037dc5c52da","Type":"ContainerStarted","Data":"80f367b146682ff16c836c98a05279148d5d7a5dc75a96004b78b56a7d46cbba"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.964860 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.964900 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerStarted","Data":"c3914c6f96c69b50f09e5cde441306769d8d13bee3e4a902660bac1403330b7f"} Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.965644 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:51 crc kubenswrapper[4902]: I1202 14:36:51.988712 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerStarted","Data":"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"} Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.003045 4902 generic.go:334] "Generic (PLEG): container finished" podID="318a2e77-a615-4af8-bade-d574dd90ca91" containerID="d71cc061eb90f6c52cc70e264560970e0903d0adc87c344cb8b96c161dbab470" exitCode=137 Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.003071 4902 generic.go:334] "Generic (PLEG): container finished" podID="318a2e77-a615-4af8-bade-d574dd90ca91" containerID="6f7e670709e8e0842994de3af86ecc3e1d12533a9c5031a5d8ac8b6a608bbf2c" exitCode=137 Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.003090 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerDied","Data":"d71cc061eb90f6c52cc70e264560970e0903d0adc87c344cb8b96c161dbab470"} Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.003112 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerDied","Data":"6f7e670709e8e0842994de3af86ecc3e1d12533a9c5031a5d8ac8b6a608bbf2c"} Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.008515 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" podStartSLOduration=4.008496186 podStartE2EDuration="4.008496186s" podCreationTimestamp="2025-12-02 14:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:51.997513707 +0000 UTC m=+1243.188822416" watchObservedRunningTime="2025-12-02 14:36:52.008496186 +0000 UTC m=+1243.199804895" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.029258 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.044836 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/horizon-6cdbd8859c-g8rmc"] Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.046174 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podStartSLOduration=4.046148345 podStartE2EDuration="4.046148345s" podCreationTimestamp="2025-12-02 14:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:52.031451712 +0000 UTC m=+1243.222760421" watchObservedRunningTime="2025-12-02 14:36:52.046148345 +0000 UTC m=+1243.237457054" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.237100 4902 scope.go:117] "RemoveContainer" containerID="bd75a7da1a4e580e605781dffd69856bad4b9b2e0d9ea492f0939f90a56b60c1" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.344750 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.469916 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.469965 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.470017 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.470145 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chvbv\" (UniqueName: \"kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.470218 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.471969 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs" (OuterVolumeSpecName: "logs") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.477688 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv" (OuterVolumeSpecName: "kube-api-access-chvbv") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047"). InnerVolumeSpecName "kube-api-access-chvbv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.477761 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:52 crc kubenswrapper[4902]: E1202 14:36:52.495106 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts podName:dcf00bba-e4b0-46e4-afcd-c4a475ff5047 nodeName:}" failed. No retries permitted until 2025-12-02 14:36:52.995078598 +0000 UTC m=+1244.186387297 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "scripts" (UniqueName: "kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047") : error deleting /var/lib/kubelet/pods/dcf00bba-e4b0-46e4-afcd-c4a475ff5047/volume-subpaths: remove /var/lib/kubelet/pods/dcf00bba-e4b0-46e4-afcd-c4a475ff5047/volume-subpaths: no such file or directory Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.495524 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data" (OuterVolumeSpecName: "config-data") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.572300 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.572336 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.572348 4902 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:52 crc kubenswrapper[4902]: I1202 14:36:52.572360 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chvbv\" (UniqueName: \"kubernetes.io/projected/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-kube-api-access-chvbv\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.030388 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerStarted","Data":"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"} Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.030746 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api-log" containerID="cri-o://3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53" gracePeriod=30 Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.030891 4902 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.031097 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api" containerID="cri-o://2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5" gracePeriod=30 Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.045859 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6fc8fd9965-7vwnt" event={"ID":"dcf00bba-e4b0-46e4-afcd-c4a475ff5047","Type":"ContainerDied","Data":"8b25b3665e321af9d35c05d1af1c71ee5a9fc0d949e1c111c8c6d0b18de35d41"} Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.046124 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6fc8fd9965-7vwnt" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.052810 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerStarted","Data":"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91"} Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.053300 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.053287085 podStartE2EDuration="5.053287085s" podCreationTimestamp="2025-12-02 14:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:53.046834554 +0000 UTC m=+1244.238143263" watchObservedRunningTime="2025-12-02 14:36:53.053287085 +0000 UTC m=+1244.244595794" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.061359 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54b4cc97b8-h6x6n" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.079273 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") pod \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\" (UID: \"dcf00bba-e4b0-46e4-afcd-c4a475ff5047\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.081139 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts" (OuterVolumeSpecName: "scripts") pod "dcf00bba-e4b0-46e4-afcd-c4a475ff5047" (UID: "dcf00bba-e4b0-46e4-afcd-c4a475ff5047"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.115093 4902 scope.go:117] "RemoveContainer" containerID="31fa4fd7dccceddb5792c84cf234eebaa6816a9d8673b80e34cbc681a2ef4189" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.119970 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" path="/var/lib/kubelet/pods/705850b6-c4e0-43ae-a1b2-1a8c536b97bd/volumes" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.182858 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcf00bba-e4b0-46e4-afcd-c4a475ff5047-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.219692 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-54c4bbdfbb-v8pjf" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.399244 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-ff5b6d47-dgpgd" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.405025 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.414172 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6fc8fd9965-7vwnt"] Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.502081 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key\") pod \"318a2e77-a615-4af8-bade-d574dd90ca91\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.502428 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs\") pod \"318a2e77-a615-4af8-bade-d574dd90ca91\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.502481 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts\") pod \"318a2e77-a615-4af8-bade-d574dd90ca91\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.502513 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wql4d\" (UniqueName: \"kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d\") pod \"318a2e77-a615-4af8-bade-d574dd90ca91\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.502532 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data\") pod \"318a2e77-a615-4af8-bade-d574dd90ca91\" (UID: \"318a2e77-a615-4af8-bade-d574dd90ca91\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.503126 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs" (OuterVolumeSpecName: "logs") pod "318a2e77-a615-4af8-bade-d574dd90ca91" (UID: "318a2e77-a615-4af8-bade-d574dd90ca91"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.528109 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "318a2e77-a615-4af8-bade-d574dd90ca91" (UID: "318a2e77-a615-4af8-bade-d574dd90ca91"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.536226 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d" (OuterVolumeSpecName: "kube-api-access-wql4d") pod "318a2e77-a615-4af8-bade-d574dd90ca91" (UID: "318a2e77-a615-4af8-bade-d574dd90ca91"). InnerVolumeSpecName "kube-api-access-wql4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.536709 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts" (OuterVolumeSpecName: "scripts") pod "318a2e77-a615-4af8-bade-d574dd90ca91" (UID: "318a2e77-a615-4af8-bade-d574dd90ca91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.540274 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data" (OuterVolumeSpecName: "config-data") pod "318a2e77-a615-4af8-bade-d574dd90ca91" (UID: "318a2e77-a615-4af8-bade-d574dd90ca91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.562013 4902 scope.go:117] "RemoveContainer" containerID="9eedcce42209e0a9f4b55b6908860a717e63479586c12d555412f4a2fa66db48" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.605127 4902 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/318a2e77-a615-4af8-bade-d574dd90ca91-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.605164 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a2e77-a615-4af8-bade-d574dd90ca91-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.605175 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.605184 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wql4d\" (UniqueName: \"kubernetes.io/projected/318a2e77-a615-4af8-bade-d574dd90ca91-kube-api-access-wql4d\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.605200 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/318a2e77-a615-4af8-bade-d574dd90ca91-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.668293 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.716399 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808395 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808650 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsmpm\" (UniqueName: \"kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm\") pod \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808724 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808771 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config\") pod \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808804 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808856 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle\") pod \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808891 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808913 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config\") pod \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808949 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs\") pod \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\" (UID: \"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.808984 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cx8v\" 
(UniqueName: \"kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.809039 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.809058 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom\") pod \"a0488186-4ece-4711-90d9-0adb7c0663c5\" (UID: \"a0488186-4ece-4711-90d9-0adb7c0663c5\") " Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.810520 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs" (OuterVolumeSpecName: "logs") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.810529 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.812942 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" (UID: "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.813108 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts" (OuterVolumeSpecName: "scripts") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.815312 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm" (OuterVolumeSpecName: "kube-api-access-qsmpm") pod "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" (UID: "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c"). InnerVolumeSpecName "kube-api-access-qsmpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.820770 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v" (OuterVolumeSpecName: "kube-api-access-4cx8v") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "kube-api-access-4cx8v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.820776 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912275 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0488186-4ece-4711-90d9-0adb7c0663c5-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912304 4902 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912317 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cx8v\" (UniqueName: \"kubernetes.io/projected/a0488186-4ece-4711-90d9-0adb7c0663c5-kube-api-access-4cx8v\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912330 4902 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0488186-4ece-4711-90d9-0adb7c0663c5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912340 4902 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912351 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.912363 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsmpm\" (UniqueName: \"kubernetes.io/projected/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-kube-api-access-qsmpm\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.945509 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.971712 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config" (OuterVolumeSpecName: "config") pod "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" (UID: "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.977661 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" (UID: "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:53 crc kubenswrapper[4902]: I1202 14:36:53.985441 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data" (OuterVolumeSpecName: "config-data") pod "a0488186-4ece-4711-90d9-0adb7c0663c5" (UID: "a0488186-4ece-4711-90d9-0adb7c0663c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.011247 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" (UID: "81bf29f8-c4f9-4cc4-9c5e-3fac243d204c"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.014378 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.014412 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.014421 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0488186-4ece-4711-90d9-0adb7c0663c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.014431 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.014439 4902 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.072372 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" event={"ID":"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64","Type":"ContainerStarted","Data":"6d9a90cd62750445a107c3c07dc693aedfe0e6cef3d6be1379fb00d6ed5027e1"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.072720 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" event={"ID":"1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64","Type":"ContainerStarted","Data":"e79c614d44a7c9b503f395a590dd53332e7ad50a6df26a868dae07d5870a42b7"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.075737 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerStarted","Data":"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.079688 4902 generic.go:334] "Generic (PLEG): container finished" podID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerID="e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61" exitCode=0 Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.079758 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerDied","Data":"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.079779 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-75bd8f464d-cnfhg" event={"ID":"81bf29f8-c4f9-4cc4-9c5e-3fac243d204c","Type":"ContainerDied","Data":"d06237c0441a8bcfb905045362575a3a55a04184ec79fbc57e47bf77ced2758d"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.079793 4902 scope.go:117] "RemoveContainer" containerID="8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.079704 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-75bd8f464d-cnfhg" Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084342 4902 generic.go:334] "Generic (PLEG): container finished" podID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerID="2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5" exitCode=0 Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084388 4902 generic.go:334] "Generic (PLEG): container finished" podID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerID="3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53" exitCode=143 Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084427 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerDied","Data":"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084469 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerDied","Data":"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084480 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0488186-4ece-4711-90d9-0adb7c0663c5","Type":"ContainerDied","Data":"5c0d97249fba417949e780668d1151adb2be86f5fd588c1b11c262b7ceab41fa"} Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.084549 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.094419 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d54dc89f-fxxfb" event={"ID":"bea5327c-b120-4e9b-876c-94e46621dcad","Type":"ContainerStarted","Data":"d63e49d90455489e1e4c629d6426d24103e04727d4a458851de204112f5102b3"}
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.094468 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d54dc89f-fxxfb" event={"ID":"bea5327c-b120-4e9b-876c-94e46621dcad","Type":"ContainerStarted","Data":"dcb3eb0fdb2ee43839ec6f576528f984dcb5f4abcfbf4affa5ce7b54d6d82ba6"}
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.095696 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7cb4b5cd86-6nl4d" podStartSLOduration=2.7742007859999998 podStartE2EDuration="6.095679948s" podCreationTimestamp="2025-12-02 14:36:48 +0000 UTC" firstStartedPulling="2025-12-02 14:36:49.854688279 +0000 UTC m=+1241.045996988" lastFinishedPulling="2025-12-02 14:36:53.176167451 +0000 UTC m=+1244.367476150" observedRunningTime="2025-12-02 14:36:54.093893728 +0000 UTC m=+1245.285202437" watchObservedRunningTime="2025-12-02 14:36:54.095679948 +0000 UTC m=+1245.286988657"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.118733 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-ff5b6d47-dgpgd"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.119484 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff5b6d47-dgpgd" event={"ID":"318a2e77-a615-4af8-bade-d574dd90ca91","Type":"ContainerDied","Data":"2e1248a5ee03bf1b25bce71082f71bc5798b816fab91c488069ae1e3fdf01797"}
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.126657 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.392302498 podStartE2EDuration="7.126639029s" podCreationTimestamp="2025-12-02 14:36:47 +0000 UTC" firstStartedPulling="2025-12-02 14:36:49.033155197 +0000 UTC m=+1240.224463906" lastFinishedPulling="2025-12-02 14:36:50.767491728 +0000 UTC m=+1241.958800437" observedRunningTime="2025-12-02 14:36:54.119682163 +0000 UTC m=+1245.310990872" watchObservedRunningTime="2025-12-02 14:36:54.126639029 +0000 UTC m=+1245.317947828"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.180098 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5d54dc89f-fxxfb" podStartSLOduration=3.149779078 podStartE2EDuration="6.180080932s" podCreationTimestamp="2025-12-02 14:36:48 +0000 UTC" firstStartedPulling="2025-12-02 14:36:50.144863909 +0000 UTC m=+1241.336172618" lastFinishedPulling="2025-12-02 14:36:53.175165763 +0000 UTC m=+1244.366474472" observedRunningTime="2025-12-02 14:36:54.158376581 +0000 UTC m=+1245.349685290" watchObservedRunningTime="2025-12-02 14:36:54.180080932 +0000 UTC m=+1245.371389641"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.204951 4902 scope.go:117] "RemoveContainer" containerID="e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.231622 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.235222 4902 scope.go:117] "RemoveContainer" containerID="8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.239448 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc\": container with ID starting with 8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc not found: ID does not exist" containerID="8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.239675 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc"} err="failed to get container status \"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc\": rpc error: code = NotFound desc = could not find container \"8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc\": container with ID starting with 8c2e85f88f046ec76e1ebc10ea8c97f4f5117270855f91fc596e16ac337c19bc not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.239759 4902 scope.go:117] "RemoveContainer" containerID="e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.243820 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61\": container with ID starting with e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61 not found: ID does not exist" containerID="e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.243962 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61"} err="failed to get container status \"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61\": rpc error: code = NotFound desc = could not find container \"e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61\": container with ID starting with e70e649b966f159ca38555efb648a7d2e58df64c8faddb40e1d59e95314ffa61 not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.244038 4902 scope.go:117] "RemoveContainer" containerID="2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.260486 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.271606 4902 scope.go:117] "RemoveContainer" containerID="3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.291973 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292422 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292436 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292443 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292448 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292462 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292468 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292475 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-notification-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292481 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-notification-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292489 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="sg-core"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292495 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="sg-core"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292507 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292513 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292522 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292527 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292539 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-central-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292547 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-central-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292574 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292580 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292595 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292602 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292614 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f7e26a-463d-4c12-a7e4-1421493143a9" containerName="init"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292620 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f7e26a-463d-4c12-a7e4-1421493143a9" containerName="init"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292634 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292639 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292649 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="proxy-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292655 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="proxy-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292662 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292668 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.292681 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292686 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292855 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292864 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f7e26a-463d-4c12-a7e4-1421493143a9" containerName="init"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292870 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292881 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-notification-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292889 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292899 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292907 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" containerName="neutron-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292914 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="ceilometer-central-agent"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292925 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292934 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292944 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="proxy-httpd"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292958 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" containerName="horizon"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292965 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="705850b6-c4e0-43ae-a1b2-1a8c536b97bd" containerName="horizon-log"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292975 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" containerName="cinder-api"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.292982 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" containerName="sg-core"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.294270 4902 scope.go:117] "RemoveContainer" containerID="2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.294985 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.298880 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.299063 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.299171 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5\": container with ID starting with 2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5 not found: ID does not exist" containerID="2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.299198 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"} err="failed to get container status \"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5\": rpc error: code = NotFound desc = could not find container \"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5\": container with ID starting with 2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5 not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.299220 4902 scope.go:117] "RemoveContainer" containerID="3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.299730 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 02 14:36:54 crc kubenswrapper[4902]: E1202 14:36:54.300813 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53\": container with ID starting with 3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53 not found: ID does not exist" containerID="3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.300854 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"} err="failed to get container status \"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53\": rpc error: code = NotFound desc = could not find container \"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53\": container with ID starting with 3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53 not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.300881 4902 scope.go:117] "RemoveContainer" containerID="2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.301124 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5"} err="failed to get container status \"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5\": rpc error: code = NotFound desc = could not find container \"2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5\": container with ID starting with 2ec7732b752b08e4089d06b287e6656cdb72f2c550638e0e54cad85b178e35b5 not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.301146 4902 scope.go:117] "RemoveContainer" containerID="3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.301338 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53"} err="failed to get container status \"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53\": rpc error: code = NotFound desc = could not find container \"3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53\": container with ID starting with 3951a6fa32ba5b429a8a2ea25040fb82433734db299787170264260118456c53 not found: ID does not exist"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.301356 4902 scope.go:117] "RemoveContainer" containerID="d71cc061eb90f6c52cc70e264560970e0903d0adc87c344cb8b96c161dbab470"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.305694 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.321291 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-ff5b6d47-dgpgd"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.339073 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.346613 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.355656 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-75bd8f464d-cnfhg"]
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424541 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424624 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data-custom\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424666 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e646a9bd-99e5-41c1-8187-076691cad16e-logs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424688 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424710 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424773 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424799 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e646a9bd-99e5-41c1-8187-076691cad16e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424825 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-scripts\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.424843 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67d7p\" (UniqueName: \"kubernetes.io/projected/e646a9bd-99e5-41c1-8187-076691cad16e-kube-api-access-67d7p\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528488 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e646a9bd-99e5-41c1-8187-076691cad16e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528542 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-scripts\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528577 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67d7p\" (UniqueName: \"kubernetes.io/projected/e646a9bd-99e5-41c1-8187-076691cad16e-kube-api-access-67d7p\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528638 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528674 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data-custom\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528709 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e646a9bd-99e5-41c1-8187-076691cad16e-logs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528726 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528749 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.528796 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.530260 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e646a9bd-99e5-41c1-8187-076691cad16e-logs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.532982 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e646a9bd-99e5-41c1-8187-076691cad16e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.533812 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.549554 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-scripts\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.556129 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.556359 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.556661 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data-custom\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.557085 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e646a9bd-99e5-41c1-8187-076691cad16e-config-data\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.568096 4902 scope.go:117] "RemoveContainer" containerID="6f7e670709e8e0842994de3af86ecc3e1d12533a9c5031a5d8ac8b6a608bbf2c"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.569164 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67d7p\" (UniqueName: \"kubernetes.io/projected/e646a9bd-99e5-41c1-8187-076691cad16e-kube-api-access-67d7p\") pod \"cinder-api-0\" (UID: \"e646a9bd-99e5-41c1-8187-076691cad16e\") " pod="openstack/cinder-api-0"
Dec 02 14:36:54 crc kubenswrapper[4902]: I1202 14:36:54.627927 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.007888 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54c8dd8948-wqzzm"]
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.009544 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.012437 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.013689 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.034805 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54c8dd8948-wqzzm"]
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.118438 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="318a2e77-a615-4af8-bade-d574dd90ca91" path="/var/lib/kubelet/pods/318a2e77-a615-4af8-bade-d574dd90ca91/volumes"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.119289 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81bf29f8-c4f9-4cc4-9c5e-3fac243d204c" path="/var/lib/kubelet/pods/81bf29f8-c4f9-4cc4-9c5e-3fac243d204c/volumes"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.120300 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0488186-4ece-4711-90d9-0adb7c0663c5" path="/var/lib/kubelet/pods/a0488186-4ece-4711-90d9-0adb7c0663c5/volumes"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.121709 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcf00bba-e4b0-46e4-afcd-c4a475ff5047" path="/var/lib/kubelet/pods/dcf00bba-e4b0-46e4-afcd-c4a475ff5047/volumes"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144539 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-logs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144716 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w294q\" (UniqueName: \"kubernetes.io/projected/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-kube-api-access-w294q\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144768 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-public-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144836 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144886 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-internal-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144930 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data-custom\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.144963 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-combined-ca-bundle\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.169515 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246363 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-internal-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246433 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data-custom\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246461 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-combined-ca-bundle\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246495 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-logs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246728 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w294q\" (UniqueName: \"kubernetes.io/projected/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-kube-api-access-w294q\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246777 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-public-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.246855 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.250666 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-logs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.253752 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-combined-ca-bundle\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.253979 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-internal-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.256320 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data-custom\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.256667 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-public-tls-certs\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.258508 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-config-data\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.266531 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w294q\" (UniqueName: \"kubernetes.io/projected/eb085e9d-79c7-495c-b0b3-7fbe62e96cf7-kube-api-access-w294q\") pod \"barbican-api-54c8dd8948-wqzzm\" (UID: \"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7\") " pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.381694 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.451855 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-54c4bbdfbb-v8pjf"
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.518647 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"]
Dec 02 14:36:55 crc kubenswrapper[4902]: I1202 14:36:55.877297 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54c8dd8948-wqzzm"]
Dec 02 14:36:55 crc kubenswrapper[4902]: W1202 14:36:55.879494 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb085e9d_79c7_495c_b0b3_7fbe62e96cf7.slice/crio-69ece23709fa7194b77f3362a8d31edef2621057eb88ea7dfe623680b906af30 WatchSource:0}: Error finding container 69ece23709fa7194b77f3362a8d31edef2621057eb88ea7dfe623680b906af30: Status 404 returned error can't find the container with id 69ece23709fa7194b77f3362a8d31edef2621057eb88ea7dfe623680b906af30
Dec 02 14:36:56 crc kubenswrapper[4902]: I1202 14:36:56.150395 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e646a9bd-99e5-41c1-8187-076691cad16e","Type":"ContainerStarted","Data":"741d9d6e8ac3690d7cb898a6da064dbf0417e93722c698c8969ef3bd5b567196"}
Dec 02 14:36:56 crc kubenswrapper[4902]: I1202 14:36:56.150448 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e646a9bd-99e5-41c1-8187-076691cad16e","Type":"ContainerStarted","Data":"a41e863ac67c79c313d25636815fd2879490a82d2f0eb54130613dd770840b83"}
Dec 02 14:36:56 crc kubenswrapper[4902]: I1202 14:36:56.152513 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54c8dd8948-wqzzm" event={"ID":"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7","Type":"ContainerStarted","Data":"2d1399f97f6d4e12b3f221e1ba21fa7270cc922925e4010bb4d2d0323ab7c49a"}
Dec 02 14:36:56 crc kubenswrapper[4902]: I1202 14:36:56.152573 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54c8dd8948-wqzzm" event={"ID":"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7","Type":"ContainerStarted","Data":"69ece23709fa7194b77f3362a8d31edef2621057eb88ea7dfe623680b906af30"}
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.162551 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54c8dd8948-wqzzm" event={"ID":"eb085e9d-79c7-495c-b0b3-7fbe62e96cf7","Type":"ContainerStarted","Data":"5430b6cfee72f8b6dd990cd30c95195a1ab1930e279bc5904b0ef9b4edfbcce6"}
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.163573 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.163593 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54c8dd8948-wqzzm"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.165928 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e646a9bd-99e5-41c1-8187-076691cad16e","Type":"ContainerStarted","Data":"556e805201a0f5efd7cf2238ca515c00a9242b6f21afa2e75351006bbd0cddfd"}
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.166464 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.198532 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54c8dd8948-wqzzm" podStartSLOduration=3.1985164409999998 podStartE2EDuration="3.198516441s" podCreationTimestamp="2025-12-02 14:36:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:57.188087408 +0000 UTC m=+1248.379396117" watchObservedRunningTime="2025-12-02 14:36:57.198516441 +0000 UTC m=+1248.389825150"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.215172 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.215142049 podStartE2EDuration="3.215142049s" podCreationTimestamp="2025-12-02 14:36:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:36:57.21197788 +0000 UTC m=+1248.403286589" watchObservedRunningTime="2025-12-02 14:36:57.215142049 +0000 UTC m=+1248.406450758"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.271143 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-786fb786b8-w8tbv"
Dec 02 14:36:57 crc kubenswrapper[4902]: I1202 14:36:57.272499 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-786fb786b8-w8tbv"
Dec 02 14:36:58 crc kubenswrapper[4902]: I1202 14:36:58.176362 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 02 14:36:58 crc kubenswrapper[4902]: I1202 14:36:58.394207 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 02 14:36:58 crc kubenswrapper[4902]: I1202 14:36:58.909510 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn"
Dec 02 14:36:58 crc kubenswrapper[4902]: I1202 14:36:58.995921 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"]
Dec 02 14:36:58 crc kubenswrapper[4902]: I1202 14:36:58.996187 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="dnsmasq-dns" containerID="cri-o://4777eb6fa257da588902e78b521c57efea970722d0f164fb8dc5613243aad33d" gracePeriod=10
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.214446 4902 generic.go:334] "Generic (PLEG): container finished" podID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerID="4777eb6fa257da588902e78b521c57efea970722d0f164fb8dc5613243aad33d" exitCode=0
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.216111 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" event={"ID":"8fbb65ec-2bd4-480c-8c37-106cad501bb4","Type":"ContainerDied","Data":"4777eb6fa257da588902e78b521c57efea970722d0f164fb8dc5613243aad33d"}
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.296372 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.597734 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.611453 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq"
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.743897 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.743965 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.744064 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.744082 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.744155 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.744238 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nntb\" (UniqueName: \"kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb\") pod \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\" (UID: \"8fbb65ec-2bd4-480c-8c37-106cad501bb4\") "
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.753942 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb" (OuterVolumeSpecName: "kube-api-access-2nntb") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "kube-api-access-2nntb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.808039 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.810724 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.846012 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config" (OuterVolumeSpecName: "config") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.846870 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nntb\" (UniqueName: \"kubernetes.io/projected/8fbb65ec-2bd4-480c-8c37-106cad501bb4-kube-api-access-2nntb\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.846894 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.846903 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-config\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.846911 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.864098 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.893077 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8fbb65ec-2bd4-480c-8c37-106cad501bb4" (UID: "8fbb65ec-2bd4-480c-8c37-106cad501bb4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.949400 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 14:36:59 crc kubenswrapper[4902]: I1202 14:36:59.949456 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8fbb65ec-2bd4-480c-8c37-106cad501bb4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.249854 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq"
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.249955 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="cinder-scheduler" containerID="cri-o://5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91" gracePeriod=30
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.250053 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-5qhnq" event={"ID":"8fbb65ec-2bd4-480c-8c37-106cad501bb4","Type":"ContainerDied","Data":"7e59011e8fde7da3d351321d186590ff8dab3e2c42b836cc4dc53dc331d3fb85"}
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.250090 4902 scope.go:117] "RemoveContainer" containerID="4777eb6fa257da588902e78b521c57efea970722d0f164fb8dc5613243aad33d"
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.250533 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="probe" containerID="cri-o://58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad" gracePeriod=30
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.279528 4902 scope.go:117] "RemoveContainer" containerID="8dce742eede555127ac5370d816e48082c2ca90d67ac6c41d67d926ece1b2e5f"
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.310997 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"]
Dec 02 14:37:00 crc kubenswrapper[4902]: I1202 14:37:00.319222 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-5qhnq"]
Dec 02 14:37:01 crc kubenswrapper[4902]: I1202 14:37:01.117173 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" path="/var/lib/kubelet/pods/8fbb65ec-2bd4-480c-8c37-106cad501bb4/volumes"
Dec 02 14:37:01 crc kubenswrapper[4902]: I1202 14:37:01.251867 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54b4cc97b8-h6x6n"
Dec 02 14:37:01 crc kubenswrapper[4902]: I1202 14:37:01.282634 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54b4cc97b8-h6x6n"
Dec 02 14:37:02 crc kubenswrapper[4902]: I1202 14:37:02.273082 4902 generic.go:334] "Generic (PLEG): container finished" podID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerID="58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad" exitCode=0
Dec 02 14:37:02 crc kubenswrapper[4902]: I1202 14:37:02.273155 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerDied","Data":"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"}
Dec 02 14:37:02 crc kubenswrapper[4902]: I1202 14:37:02.743747 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-867694b54b-2t5p7"
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.752299 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.824834 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwshr\" (UniqueName: \"kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.824910 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825133 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825168 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825189 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825213 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id\") pod \"25604397-9797-48ac-ad30-bf8b69b00bfa\" (UID: \"25604397-9797-48ac-ad30-bf8b69b00bfa\") "
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825372 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.825807 4902 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25604397-9797-48ac-ad30-bf8b69b00bfa-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.832286 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr" (OuterVolumeSpecName: "kube-api-access-kwshr") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "kube-api-access-kwshr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.833777 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.843238 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts" (OuterVolumeSpecName: "scripts") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.900847 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.930894 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.930934 4902 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.930951 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwshr\" (UniqueName: \"kubernetes.io/projected/25604397-9797-48ac-ad30-bf8b69b00bfa-kube-api-access-kwshr\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.930966 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:03 crc kubenswrapper[4902]: I1202 14:37:03.944877 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data" (OuterVolumeSpecName: "config-data") pod "25604397-9797-48ac-ad30-bf8b69b00bfa" (UID: "25604397-9797-48ac-ad30-bf8b69b00bfa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.032960 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25604397-9797-48ac-ad30-bf8b69b00bfa-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.292210 4902 generic.go:334] "Generic (PLEG): container finished" podID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerID="5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91" exitCode=0
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.292257 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerDied","Data":"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91"}
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.292286 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"25604397-9797-48ac-ad30-bf8b69b00bfa","Type":"ContainerDied","Data":"1e00e0ab5d613bc78bcd14b882f380558215256845620f269fc98337e56aa1de"}
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.292286 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.292306 4902 scope.go:117] "RemoveContainer" containerID="58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.331479 4902 scope.go:117] "RemoveContainer" containerID="5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.376877 4902 scope.go:117] "RemoveContainer" containerID="58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"
Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.381628 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad\": container with ID starting with 58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad not found: ID does not exist" containerID="58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.381659 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad"} err="failed to get container status \"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad\": rpc error: code = NotFound desc = could not find container \"58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad\": container with ID starting with 58a88a7ee6a2b8180d6e26fba4874ecccaa311d3eb191691cde5716db1b146ad not found: ID does not exist"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.381681 4902 scope.go:117] "RemoveContainer" containerID="5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91"
Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.381736 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.386632 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91\": container with
ID starting with 5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91 not found: ID does not exist" containerID="5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.386668 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91"} err="failed to get container status \"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91\": rpc error: code = NotFound desc = could not find container \"5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91\": container with ID starting with 5fdc3d6f688e6e7802f6f8fbf134f552cc25c3b502000a75964be766b6e41b91 not found: ID does not exist" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.394981 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.419727 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.420088 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="cinder-scheduler" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420106 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="cinder-scheduler" Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.420140 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="init" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420146 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="init" Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.420156 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="dnsmasq-dns" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420163 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="dnsmasq-dns" Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.420175 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="probe" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420181 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="probe" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420343 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="cinder-scheduler" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420364 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" containerName="probe" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.420382 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fbb65ec-2bd4-480c-8c37-106cad501bb4" containerName="dnsmasq-dns" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.426253 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.431085 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.472092 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.483612 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.484937 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.489935 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.490127 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.490298 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-xcp7n" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.504665 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.545961 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546027 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546071 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546094 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqnjx\" (UniqueName: \"kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546120 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxhvl\" (UniqueName: \"kubernetes.io/projected/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-kube-api-access-mxhvl\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546138 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546271 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-scripts\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546406 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546427 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.546477 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648519 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqnjx\" (UniqueName: \"kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648596 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxhvl\" (UniqueName: \"kubernetes.io/projected/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-kube-api-access-mxhvl\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648618 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648642 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-scripts\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648686 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret\") pod \"openstackclient\" (UID: 
\"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648702 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648723 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648791 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648821 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648835 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.648860 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.649730 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.653918 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.654097 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.656016 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.657933 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-scripts\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.664032 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.667045 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqnjx\" (UniqueName: \"kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx\") pod \"openstackclient\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.667349 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-config-data\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.677007 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxhvl\" (UniqueName: \"kubernetes.io/projected/b464cf62-dd2e-4885-9aeb-98c7da8d9e37-kube-api-access-mxhvl\") pod \"cinder-scheduler-0\" (UID: \"b464cf62-dd2e-4885-9aeb-98c7da8d9e37\") " pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.731965 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.732026 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.748829 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.749583 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.752115 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.761697 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.798634 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.800009 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.810363 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.851930 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvx82\" (UniqueName: \"kubernetes.io/projected/1ac7fd15-4a88-4085-ae7c-8f646c29943f-kube-api-access-jvx82\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.851998 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.852122 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.852146 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config-secret\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.955983 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.956258 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config-secret\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.956347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvx82\" (UniqueName: \"kubernetes.io/projected/1ac7fd15-4a88-4085-ae7c-8f646c29943f-kube-api-access-jvx82\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.956409 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.957198 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.960775 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.962909 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1ac7fd15-4a88-4085-ae7c-8f646c29943f-openstack-config-secret\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: I1202 14:37:04.972488 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvx82\" (UniqueName: \"kubernetes.io/projected/1ac7fd15-4a88-4085-ae7c-8f646c29943f-kube-api-access-jvx82\") pod \"openstackclient\" (UID: \"1ac7fd15-4a88-4085-ae7c-8f646c29943f\") " pod="openstack/openstackclient" Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.980632 4902 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 02 14:37:04 crc kubenswrapper[4902]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_d483a9ab-2d55-41a9-81e2-fd6780fd71d2_0(f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb" Netns:"/var/run/netns/d07bf011-69f4-4033-8d17-03144466c4e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb;K8S_POD_UID=d483a9ab-2d55-41a9-81e2-fd6780fd71d2" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/d483a9ab-2d55-41a9-81e2-fd6780fd71d2]: expected pod UID "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" but got "1ac7fd15-4a88-4085-ae7c-8f646c29943f" from Kube API Dec 02 14:37:04 crc kubenswrapper[4902]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 14:37:04 crc kubenswrapper[4902]: > Dec 02 14:37:04 crc kubenswrapper[4902]: E1202 14:37:04.980976 4902 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 02 14:37:04 crc kubenswrapper[4902]: rpc error: code = Unknown desc = failed to 
create pod network sandbox k8s_openstackclient_openstack_d483a9ab-2d55-41a9-81e2-fd6780fd71d2_0(f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb" Netns:"/var/run/netns/d07bf011-69f4-4033-8d17-03144466c4e4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=f1b16f1f53ce811f0f94185c542790a16b3a1d87c2136e3304f9617bd0b48ecb;K8S_POD_UID=d483a9ab-2d55-41a9-81e2-fd6780fd71d2" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/d483a9ab-2d55-41a9-81e2-fd6780fd71d2]: expected pod UID "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" but got "1ac7fd15-4a88-4085-ae7c-8f646c29943f" from Kube API Dec 02 14:37:04 crc kubenswrapper[4902]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 14:37:04 crc kubenswrapper[4902]: > pod="openstack/openstackclient" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.088900 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.119577 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25604397-9797-48ac-ad30-bf8b69b00bfa" path="/var/lib/kubelet/pods/25604397-9797-48ac-ad30-bf8b69b00bfa/volumes" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.206975 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.326900 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b464cf62-dd2e-4885-9aeb-98c7da8d9e37","Type":"ContainerStarted","Data":"7af638c9410bada66f52416c454673a6f5dee52a534809e0ca423d6c1517755b"} Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.342910 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.361931 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.365113 4902 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d483a9ab-2d55-41a9-81e2-fd6780fd71d2" podUID="1ac7fd15-4a88-4085-ae7c-8f646c29943f" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.476957 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqnjx\" (UniqueName: \"kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx\") pod \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.477010 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle\") pod \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.477096 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config\") pod \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.477205 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret\") pod \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\" (UID: \"d483a9ab-2d55-41a9-81e2-fd6780fd71d2\") " Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.480026 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" (UID: "d483a9ab-2d55-41a9-81e2-fd6780fd71d2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.497693 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" (UID: "d483a9ab-2d55-41a9-81e2-fd6780fd71d2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.499086 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx" (OuterVolumeSpecName: "kube-api-access-nqnjx") pod "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" (UID: "d483a9ab-2d55-41a9-81e2-fd6780fd71d2"). InnerVolumeSpecName "kube-api-access-nqnjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.519680 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d483a9ab-2d55-41a9-81e2-fd6780fd71d2" (UID: "d483a9ab-2d55-41a9-81e2-fd6780fd71d2"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.584642 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.584678 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqnjx\" (UniqueName: \"kubernetes.io/projected/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-kube-api-access-nqnjx\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.584687 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.584698 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d483a9ab-2d55-41a9-81e2-fd6780fd71d2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:05 crc kubenswrapper[4902]: I1202 14:37:05.775886 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 14:37:06 crc kubenswrapper[4902]: I1202 14:37:06.357912 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b464cf62-dd2e-4885-9aeb-98c7da8d9e37","Type":"ContainerStarted","Data":"952baf667163b3560fb2fc4a2b5bfa4b426aa0bce0444845eb8aa2ec2e49da03"} Dec 02 14:37:06 crc kubenswrapper[4902]: I1202 14:37:06.365178 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 14:37:06 crc kubenswrapper[4902]: I1202 14:37:06.370999 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1ac7fd15-4a88-4085-ae7c-8f646c29943f","Type":"ContainerStarted","Data":"bd88d4b945959fcc325865cec4bf878a8711167e2474dba128a6ddda307757f8"} Dec 02 14:37:06 crc kubenswrapper[4902]: I1202 14:37:06.428963 4902 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d483a9ab-2d55-41a9-81e2-fd6780fd71d2" podUID="1ac7fd15-4a88-4085-ae7c-8f646c29943f" Dec 02 14:37:06 crc kubenswrapper[4902]: I1202 14:37:06.938088 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54c8dd8948-wqzzm" Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.119937 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d483a9ab-2d55-41a9-81e2-fd6780fd71d2" path="/var/lib/kubelet/pods/d483a9ab-2d55-41a9-81e2-fd6780fd71d2/volumes" Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.120293 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.382048 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b464cf62-dd2e-4885-9aeb-98c7da8d9e37","Type":"ContainerStarted","Data":"d0bfb21ccbc9d6d56a35de6984728b1abfb8cd5b5a7cda6015d79c1327807b10"} Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.402118 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.402098771 podStartE2EDuration="3.402098771s" 
podCreationTimestamp="2025-12-02 14:37:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:07.399355024 +0000 UTC m=+1258.590663733" watchObservedRunningTime="2025-12-02 14:37:07.402098771 +0000 UTC m=+1258.593407470" Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.520605 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54c8dd8948-wqzzm" Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.584351 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"] Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.584588 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log" containerID="cri-o://9330f452e963c1ddaf725f531a15ebfa67af0e359e755b9f05c50d6806dbb737" gracePeriod=30 Dec 02 14:37:07 crc kubenswrapper[4902]: I1202 14:37:07.584946 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api" containerID="cri-o://c3914c6f96c69b50f09e5cde441306769d8d13bee3e4a902660bac1403330b7f" gracePeriod=30 Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.393066 4902 generic.go:334] "Generic (PLEG): container finished" podID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerID="9330f452e963c1ddaf725f531a15ebfa67af0e359e755b9f05c50d6806dbb737" exitCode=143 Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.394058 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerDied","Data":"9330f452e963c1ddaf725f531a15ebfa67af0e359e755b9f05c50d6806dbb737"} Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.485678 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-65c8455667-7wzb6"] Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.487266 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.488954 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.491728 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.491943 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.503386 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65c8455667-7wzb6"] Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537741 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thb8l\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-kube-api-access-thb8l\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537843 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-combined-ca-bundle\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537872 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-public-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537895 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-internal-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537950 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-run-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537977 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-log-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.537994 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-config-data\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " 
pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.538019 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-etc-swift\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-log-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639551 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-config-data\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639594 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-etc-swift\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639632 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thb8l\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-kube-api-access-thb8l\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639710 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-combined-ca-bundle\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639735 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-public-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639756 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-internal-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.639802 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-run-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 
14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.640249 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-log-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.640310 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8804e694-63ac-4278-8672-56c862db1007-run-httpd\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.645407 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-config-data\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.647230 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-internal-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.652481 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-etc-swift\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.659725 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-combined-ca-bundle\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.670171 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thb8l\" (UniqueName: \"kubernetes.io/projected/8804e694-63ac-4278-8672-56c862db1007-kube-api-access-thb8l\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.673119 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8804e694-63ac-4278-8672-56c862db1007-public-tls-certs\") pod \"swift-proxy-65c8455667-7wzb6\" (UID: \"8804e694-63ac-4278-8672-56c862db1007\") " pod="openstack/swift-proxy-65c8455667-7wzb6" Dec 02 14:37:08 crc kubenswrapper[4902]: I1202 14:37:08.839905 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65c8455667-7wzb6"
Dec 02 14:37:09 crc kubenswrapper[4902]: I1202 14:37:09.476104 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65c8455667-7wzb6"]
Dec 02 14:37:09 crc kubenswrapper[4902]: I1202 14:37:09.753457 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.417397 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65c8455667-7wzb6" event={"ID":"8804e694-63ac-4278-8672-56c862db1007","Type":"ContainerStarted","Data":"983a00d11949bc4c228e1249da51a47de5af6d41e3efd8e0b1072381e5d0e113"}
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.417438 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65c8455667-7wzb6" event={"ID":"8804e694-63ac-4278-8672-56c862db1007","Type":"ContainerStarted","Data":"18319a391624fcdf53f48afe0b97b4cd72d8c9c87b95988dc846ace7a3e99176"}
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.417448 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65c8455667-7wzb6" event={"ID":"8804e694-63ac-4278-8672-56c862db1007","Type":"ContainerStarted","Data":"d939d94c4e5186283404329ffdb2802390ec47e3780d409db31c69f577428bf4"}
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.417534 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65c8455667-7wzb6"
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.417548 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65c8455667-7wzb6"
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.438588 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-65c8455667-7wzb6" podStartSLOduration=2.438555527 podStartE2EDuration="2.438555527s" podCreationTimestamp="2025-12-02 14:37:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:10.433950068 +0000 UTC m=+1261.625258777" watchObservedRunningTime="2025-12-02 14:37:10.438555527 +0000 UTC m=+1261.629864226"
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.740168 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:55276->10.217.0.182:9311: read: connection reset by peer"
Dec 02 14:37:10 crc kubenswrapper[4902]: I1202 14:37:10.740213 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:55292->10.217.0.182:9311: read: connection reset by peer"
Dec 02 14:37:11 crc kubenswrapper[4902]: I1202 14:37:11.444426 4902 generic.go:334] "Generic (PLEG): container finished" podID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerID="c3914c6f96c69b50f09e5cde441306769d8d13bee3e4a902660bac1403330b7f" exitCode=0
Dec 02 14:37:11 crc kubenswrapper[4902]: I1202 14:37:11.445826 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerDied","Data":"c3914c6f96c69b50f09e5cde441306769d8d13bee3e4a902660bac1403330b7f"}
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.211893 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-5f7jd"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.213665 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.244064 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5f7jd"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.275111 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq5lg\" (UniqueName: \"kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.275152 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.304501 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-th6wm"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.305746 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.318402 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-th6wm"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.377025 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.377158 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq5lg\" (UniqueName: \"kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.377193 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.377271 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k9gv\" (UniqueName: \"kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.379942 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.416401 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4a51-account-create-update-4fmkl"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.417711 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq5lg\" (UniqueName: \"kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg\") pod \"nova-api-db-create-5f7jd\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") " pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.418216 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.422687 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.435163 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-75k7n"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.436577 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.460055 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4a51-account-create-update-4fmkl"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.473711 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-75k7n"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.479870 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jzff\" (UniqueName: \"kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.479958 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.480023 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k9gv\" (UniqueName: \"kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.480047 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.480166 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgdmk\" (UniqueName: \"kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.480205 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.497215 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.502338 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k9gv\" (UniqueName: \"kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv\") pod \"nova-cell0-db-create-th6wm\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") " pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.533512 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.581539 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgdmk\" (UniqueName: \"kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.581935 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jzff\" (UniqueName: \"kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.581965 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.581987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.582747 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.582957 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.600440 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jzff\" (UniqueName: \"kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff\") pod \"nova-api-4a51-account-create-update-4fmkl\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.605098 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgdmk\" (UniqueName: \"kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk\") pod \"nova-cell1-db-create-75k7n\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.620211 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5e4d-account-create-update-xvr22"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.621531 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.623738 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.626822 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.630595 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5e4d-account-create-update-xvr22"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.683225 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv5ps\" (UniqueName: \"kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.683310 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.779154 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4a51-account-create-update-4fmkl"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.784653 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv5ps\" (UniqueName: \"kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.784727 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.785385 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.793487 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-75k7n"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.804157 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv5ps\" (UniqueName: \"kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps\") pod \"nova-cell0-5e4d-account-create-update-xvr22\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") " pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.813091 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-42af-account-create-update-dg87s"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.814427 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.821339 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-42af-account-create-update-dg87s"]
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.823083 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.886955 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbxm7\" (UniqueName: \"kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.887153 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.989000 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.989065 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbxm7\" (UniqueName: \"kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:14 crc kubenswrapper[4902]: I1202 14:37:14.989959 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.000107 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.004827 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbxm7\" (UniqueName: \"kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7\") pod \"nova-cell1-42af-account-create-update-dg87s\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.036631 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.061392 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.061608 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="f67dc251-e88f-4954-9410-bacba8c2b367" containerName="watcher-decision-engine" containerID="cri-o://be95115dbb74b92e24593b3465532ac2427dfbfeb51637d1ba0d5899244232ff" gracePeriod=30
Dec 02 14:37:15 crc kubenswrapper[4902]: I1202 14:37:15.160323 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-42af-account-create-update-dg87s"
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.494456 4902 generic.go:334] "Generic (PLEG): container finished" podID="f67dc251-e88f-4954-9410-bacba8c2b367" containerID="be95115dbb74b92e24593b3465532ac2427dfbfeb51637d1ba0d5899244232ff" exitCode=0
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.494545 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67dc251-e88f-4954-9410-bacba8c2b367","Type":"ContainerDied","Data":"be95115dbb74b92e24593b3465532ac2427dfbfeb51637d1ba0d5899244232ff"}
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.903749 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54b4cc97b8-h6x6n"
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.932589 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data\") pod \"e27977c3-7e5e-47b7-b217-96acb16119c2\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") "
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.932715 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom\") pod \"e27977c3-7e5e-47b7-b217-96acb16119c2\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") "
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.932778 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs\") pod \"e27977c3-7e5e-47b7-b217-96acb16119c2\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") "
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.932860 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle\") pod \"e27977c3-7e5e-47b7-b217-96acb16119c2\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") "
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.933552 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktp28\" (UniqueName: \"kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28\") pod \"e27977c3-7e5e-47b7-b217-96acb16119c2\" (UID: \"e27977c3-7e5e-47b7-b217-96acb16119c2\") "
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.934437 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs" (OuterVolumeSpecName: "logs") pod "e27977c3-7e5e-47b7-b217-96acb16119c2" (UID: "e27977c3-7e5e-47b7-b217-96acb16119c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.938957 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e27977c3-7e5e-47b7-b217-96acb16119c2" (UID: "e27977c3-7e5e-47b7-b217-96acb16119c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.956989 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28" (OuterVolumeSpecName: "kube-api-access-ktp28") pod "e27977c3-7e5e-47b7-b217-96acb16119c2" (UID: "e27977c3-7e5e-47b7-b217-96acb16119c2"). InnerVolumeSpecName "kube-api-access-ktp28". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:16 crc kubenswrapper[4902]: I1202 14:37:16.970066 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e27977c3-7e5e-47b7-b217-96acb16119c2" (UID: "e27977c3-7e5e-47b7-b217-96acb16119c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.036043 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktp28\" (UniqueName: \"kubernetes.io/projected/e27977c3-7e5e-47b7-b217-96acb16119c2-kube-api-access-ktp28\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.036328 4902 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.036340 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e27977c3-7e5e-47b7-b217-96acb16119c2-logs\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.036348 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.060940 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data" (OuterVolumeSpecName: "config-data") pod "e27977c3-7e5e-47b7-b217-96acb16119c2" (UID: "e27977c3-7e5e-47b7-b217-96acb16119c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.137827 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e27977c3-7e5e-47b7-b217-96acb16119c2-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.182394 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.238872 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99lhh\" (UniqueName: \"kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh\") pod \"f67dc251-e88f-4954-9410-bacba8c2b367\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") "
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.238941 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca\") pod \"f67dc251-e88f-4954-9410-bacba8c2b367\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") "
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.238972 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data\") pod \"f67dc251-e88f-4954-9410-bacba8c2b367\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") "
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.239020 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle\") pod \"f67dc251-e88f-4954-9410-bacba8c2b367\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") "
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.239177 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs\") pod \"f67dc251-e88f-4954-9410-bacba8c2b367\" (UID: \"f67dc251-e88f-4954-9410-bacba8c2b367\") "
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.239962 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs" (OuterVolumeSpecName: "logs") pod "f67dc251-e88f-4954-9410-bacba8c2b367" (UID: "f67dc251-e88f-4954-9410-bacba8c2b367"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.244857 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh" (OuterVolumeSpecName: "kube-api-access-99lhh") pod "f67dc251-e88f-4954-9410-bacba8c2b367" (UID: "f67dc251-e88f-4954-9410-bacba8c2b367"). InnerVolumeSpecName "kube-api-access-99lhh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.289863 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "f67dc251-e88f-4954-9410-bacba8c2b367" (UID: "f67dc251-e88f-4954-9410-bacba8c2b367"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.300164 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f67dc251-e88f-4954-9410-bacba8c2b367" (UID: "f67dc251-e88f-4954-9410-bacba8c2b367"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.305979 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data" (OuterVolumeSpecName: "config-data") pod "f67dc251-e88f-4954-9410-bacba8c2b367" (UID: "f67dc251-e88f-4954-9410-bacba8c2b367"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.345089 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f67dc251-e88f-4954-9410-bacba8c2b367-logs\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.345133 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99lhh\" (UniqueName: \"kubernetes.io/projected/f67dc251-e88f-4954-9410-bacba8c2b367-kube-api-access-99lhh\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.345147 4902 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-custom-prometheus-ca\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.345156 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.345164 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f67dc251-e88f-4954-9410-bacba8c2b367-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.361497 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-th6wm"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.505632 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-th6wm" event={"ID":"da153901-c3b2-469c-944b-48209050c85c","Type":"ContainerStarted","Data":"488838a0772fb4c51d96e5f24b0e211852750ffa8a79b7a01caee9b2966e61c8"}
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.506714 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1ac7fd15-4a88-4085-ae7c-8f646c29943f","Type":"ContainerStarted","Data":"751abe6f307ad4a25cf019111ca6c6ab48debc82fa2616698ad5186cf6e3ee4b"}
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.516858 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54b4cc97b8-h6x6n"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.516816 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54b4cc97b8-h6x6n" event={"ID":"e27977c3-7e5e-47b7-b217-96acb16119c2","Type":"ContainerDied","Data":"aeb136aeaa58efb731b307b58754c781a01d030af4218ec3cbd11bd999a798c1"}
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.516947 4902 scope.go:117] "RemoveContainer" containerID="c3914c6f96c69b50f09e5cde441306769d8d13bee3e4a902660bac1403330b7f"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.523304 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"f67dc251-e88f-4954-9410-bacba8c2b367","Type":"ContainerDied","Data":"70162a8bebd26998d9d9e8dd26ecdea4853eb054266cd648540c36f86a3d229c"}
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.523391 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.556079 4902 scope.go:117] "RemoveContainer" containerID="9330f452e963c1ddaf725f531a15ebfa67af0e359e755b9f05c50d6806dbb737"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.564268 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.307048657 podStartE2EDuration="13.554890182s" podCreationTimestamp="2025-12-02 14:37:04 +0000 UTC" firstStartedPulling="2025-12-02 14:37:05.76871227 +0000 UTC m=+1256.960020969" lastFinishedPulling="2025-12-02 14:37:17.016553785 +0000 UTC m=+1268.207862494" observedRunningTime="2025-12-02 14:37:17.52991952 +0000 UTC m=+1268.721228229" watchObservedRunningTime="2025-12-02 14:37:17.554890182 +0000 UTC m=+1268.746198891"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.588975 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.601426 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-54b4cc97b8-h6x6n"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.609908 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.616684 4902 scope.go:117] "RemoveContainer" containerID="be95115dbb74b92e24593b3465532ac2427dfbfeb51637d1ba0d5899244232ff"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.628975 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.640161 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5f7jd"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.660896 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:17 crc kubenswrapper[4902]: E1202 14:37:17.661422 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f67dc251-e88f-4954-9410-bacba8c2b367" containerName="watcher-decision-engine"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661441 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f67dc251-e88f-4954-9410-bacba8c2b367" containerName="watcher-decision-engine"
Dec 02 14:37:17 crc kubenswrapper[4902]: E1202 14:37:17.661459 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661466 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log"
Dec 02 14:37:17 crc kubenswrapper[4902]: E1202 14:37:17.661486 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661493 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661778 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661819 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.661837 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f67dc251-e88f-4954-9410-bacba8c2b367" containerName="watcher-decision-engine"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.662507 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.664974 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.688173 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.698440 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-75k7n"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.706618 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5e4d-account-create-update-xvr22"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.755238 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmbn2\" (UniqueName: \"kubernetes.io/projected/d5838a49-52eb-4485-8eda-14772a0f60bd-kube-api-access-jmbn2\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.755308 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-config-data\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.755339 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.755411 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5838a49-52eb-4485-8eda-14772a0f60bd-logs\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.755436 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.795329 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-42af-account-create-update-dg87s"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.802873 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4a51-account-create-update-4fmkl"]
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.856928 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5838a49-52eb-4485-8eda-14772a0f60bd-logs\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.856979 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.857051 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmbn2\" (UniqueName: \"kubernetes.io/projected/d5838a49-52eb-4485-8eda-14772a0f60bd-kube-api-access-jmbn2\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.857076 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-config-data\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.857100 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.859262 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5838a49-52eb-4485-8eda-14772a0f60bd-logs\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.862264 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.864209 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.868334 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5838a49-52eb-4485-8eda-14772a0f60bd-config-data\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:17 crc kubenswrapper[4902]: I1202 14:37:17.873582 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmbn2\" (UniqueName: \"kubernetes.io/projected/d5838a49-52eb-4485-8eda-14772a0f60bd-kube-api-access-jmbn2\") pod \"watcher-decision-engine-0\" (UID: \"d5838a49-52eb-4485-8eda-14772a0f60bd\") " pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.000450 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.496983 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.545311 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4a51-account-create-update-4fmkl" event={"ID":"2bcd69e0-8303-46f6-bef1-37faec667798","Type":"ContainerStarted","Data":"552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.545351 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4a51-account-create-update-4fmkl" event={"ID":"2bcd69e0-8303-46f6-bef1-37faec667798","Type":"ContainerStarted","Data":"d1499eb1dc08d49f59921c19e73c40139c1af7dd7e2059454a6c8cff6d7fc64f"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.549432 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-75k7n" event={"ID":"e4c64af0-fa3b-4607-be3b-cda80bc8831c","Type":"ContainerStarted","Data":"08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.549479 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-75k7n" event={"ID":"e4c64af0-fa3b-4607-be3b-cda80bc8831c","Type":"ContainerStarted","Data":"ec8f323e725b528ecb44e0aaddfd2a8da263cea053f59a03836356cdfaeede81"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.550831 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"d5838a49-52eb-4485-8eda-14772a0f60bd","Type":"ContainerStarted","Data":"04356b6e33a59561fa2684ef0e87b527dbbb95a99d23333085fb361dbd3cbeea"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.561457 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-42af-account-create-update-dg87s" event={"ID":"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe","Type":"ContainerStarted","Data":"f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.561807 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-42af-account-create-update-dg87s" event={"ID":"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe","Type":"ContainerStarted","Data":"fe2adabc2558bb834af040915f857f3df9099e24572730fad555d435d6d3a070"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.569341 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-4a51-account-create-update-4fmkl" podStartSLOduration=4.569322019 podStartE2EDuration="4.569322019s" podCreationTimestamp="2025-12-02 14:37:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:18.560343116 +0000 UTC m=+1269.751651825" watchObservedRunningTime="2025-12-02 14:37:18.569322019 +0000 UTC m=+1269.760630718"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.587790 4902 generic.go:334] "Generic (PLEG): container finished" podID="0eb390fd-b392-4ec8-990b-7ed540083355" containerID="b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd" exitCode=0
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.587988 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22" event={"ID":"0eb390fd-b392-4ec8-990b-7ed540083355","Type":"ContainerDied","Data":"b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.588033 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22" event={"ID":"0eb390fd-b392-4ec8-990b-7ed540083355","Type":"ContainerStarted","Data":"141eba3cfe3eeeee01b0588d4106765c774176438b94cbdbcac40d95e23def4f"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.605352 4902 generic.go:334] "Generic (PLEG): container finished" podID="64fa5872-7787-4029-b9ce-a33b115c5858" containerID="ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d" exitCode=0
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.605644 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f7jd" event={"ID":"64fa5872-7787-4029-b9ce-a33b115c5858","Type":"ContainerDied","Data":"ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.605693 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f7jd" event={"ID":"64fa5872-7787-4029-b9ce-a33b115c5858","Type":"ContainerStarted","Data":"dd97e355c8c63d8b403e9d5cf91b961ba975594e9795e9f1c8591f3ec0793069"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.607967 4902 generic.go:334] "Generic (PLEG): container finished" podID="da153901-c3b2-469c-944b-48209050c85c" containerID="483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98" exitCode=0
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.612229 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-th6wm" event={"ID":"da153901-c3b2-469c-944b-48209050c85c","Type":"ContainerDied","Data":"483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98"}
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.613629 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-75k7n" podStartSLOduration=4.613606684 podStartE2EDuration="4.613606684s" podCreationTimestamp="2025-12-02 14:37:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:18.573045704 +0000 UTC m=+1269.764354413" watchObservedRunningTime="2025-12-02 14:37:18.613606684 +0000 UTC m=+1269.804915393"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.649437 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-42af-account-create-update-dg87s" podStartSLOduration=4.64936755 podStartE2EDuration="4.64936755s" podCreationTimestamp="2025-12-02 14:37:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:18.594386114 +0000 UTC m=+1269.785694813" watchObservedRunningTime="2025-12-02 14:37:18.64936755 +0000 UTC m=+1269.840676259"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.847921 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65c8455667-7wzb6"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.849620 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65c8455667-7wzb6"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.978024 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:37:18 crc kubenswrapper[4902]: I1202 14:37:18.978069 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54b4cc97b8-h6x6n" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.143508 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e27977c3-7e5e-47b7-b217-96acb16119c2" path="/var/lib/kubelet/pods/e27977c3-7e5e-47b7-b217-96acb16119c2/volumes"
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.148002 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f67dc251-e88f-4954-9410-bacba8c2b367" path="/var/lib/kubelet/pods/f67dc251-e88f-4954-9410-bacba8c2b367/volumes"
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.623291 4902 generic.go:334] "Generic (PLEG): container finished" podID="2bcd69e0-8303-46f6-bef1-37faec667798" containerID="552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f" exitCode=0
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.624214 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4a51-account-create-update-4fmkl" event={"ID":"2bcd69e0-8303-46f6-bef1-37faec667798","Type":"ContainerDied","Data":"552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f"}
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.625177 4902 generic.go:334] "Generic (PLEG): container finished" podID="e4c64af0-fa3b-4607-be3b-cda80bc8831c" containerID="08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525" exitCode=0
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.625233 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-75k7n" event={"ID":"e4c64af0-fa3b-4607-be3b-cda80bc8831c","Type":"ContainerDied","Data":"08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525"}
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.626627 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"d5838a49-52eb-4485-8eda-14772a0f60bd","Type":"ContainerStarted","Data":"77a5fd04faf81755917bfc6d8f977408e42b70cd04e51593e1127cdf5135ac14"}
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.631257 4902 generic.go:334] "Generic (PLEG): container finished" podID="5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" containerID="f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a" exitCode=0
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.631329 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-42af-account-create-update-dg87s" event={"ID":"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe","Type":"ContainerDied","Data":"f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a"}
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.699278 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.699259944 podStartE2EDuration="2.699259944s" podCreationTimestamp="2025-12-02 14:37:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:19.694813469 +0000 UTC m=+1270.886122178" watchObservedRunningTime="2025-12-02 14:37:19.699259944 +0000 UTC m=+1270.890568653"
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.931369 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.931649 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-log" containerID="cri-o://dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db" gracePeriod=30
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.931780 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-httpd" containerID="cri-o://028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075" gracePeriod=30
Dec 02 14:37:19 crc kubenswrapper[4902]: I1202 14:37:19.992595 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.099450 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts\") pod \"64fa5872-7787-4029-b9ce-a33b115c5858\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.099516 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq5lg\" (UniqueName: \"kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg\") pod \"64fa5872-7787-4029-b9ce-a33b115c5858\" (UID: \"64fa5872-7787-4029-b9ce-a33b115c5858\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.102837 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64fa5872-7787-4029-b9ce-a33b115c5858" (UID: "64fa5872-7787-4029-b9ce-a33b115c5858"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.112653 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg" (OuterVolumeSpecName: "kube-api-access-mq5lg") pod "64fa5872-7787-4029-b9ce-a33b115c5858" (UID: "64fa5872-7787-4029-b9ce-a33b115c5858"). InnerVolumeSpecName "kube-api-access-mq5lg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.176114 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.181588 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.201414 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts\") pod \"0eb390fd-b392-4ec8-990b-7ed540083355\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.201543 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k9gv\" (UniqueName: \"kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv\") pod \"da153901-c3b2-469c-944b-48209050c85c\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.201593 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv5ps\" (UniqueName: \"kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps\") pod \"0eb390fd-b392-4ec8-990b-7ed540083355\" (UID: \"0eb390fd-b392-4ec8-990b-7ed540083355\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.201738 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts\") pod \"da153901-c3b2-469c-944b-48209050c85c\" (UID: \"da153901-c3b2-469c-944b-48209050c85c\") "
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.202276 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64fa5872-7787-4029-b9ce-a33b115c5858-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.202294 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq5lg\" (UniqueName: \"kubernetes.io/projected/64fa5872-7787-4029-b9ce-a33b115c5858-kube-api-access-mq5lg\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.202965 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da153901-c3b2-469c-944b-48209050c85c" (UID: "da153901-c3b2-469c-944b-48209050c85c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.203308 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0eb390fd-b392-4ec8-990b-7ed540083355" (UID: "0eb390fd-b392-4ec8-990b-7ed540083355"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.206148 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv" (OuterVolumeSpecName: "kube-api-access-4k9gv") pod "da153901-c3b2-469c-944b-48209050c85c" (UID: "da153901-c3b2-469c-944b-48209050c85c"). InnerVolumeSpecName "kube-api-access-4k9gv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.206192 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps" (OuterVolumeSpecName: "kube-api-access-pv5ps") pod "0eb390fd-b392-4ec8-990b-7ed540083355" (UID: "0eb390fd-b392-4ec8-990b-7ed540083355"). InnerVolumeSpecName "kube-api-access-pv5ps". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:37:20 crc kubenswrapper[4902]: E1202 14:37:20.303459 4902 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/77cb0da3e423aac8cfe65e5f82f7593acef5999a752db492bc6dcdc847ea93ab/diff" to get inode usage: stat /var/lib/containers/storage/overlay/77cb0da3e423aac8cfe65e5f82f7593acef5999a752db492bc6dcdc847ea93ab/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_watcher-decision-engine-0_f67dc251-e88f-4954-9410-bacba8c2b367/watcher-decision-engine/0.log" to get inode usage: stat /var/log/pods/openstack_watcher-decision-engine-0_f67dc251-e88f-4954-9410-bacba8c2b367/watcher-decision-engine/0.log: no such file or directory
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.304794 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da153901-c3b2-469c-944b-48209050c85c-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.304833 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eb390fd-b392-4ec8-990b-7ed540083355-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.304845 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k9gv\" (UniqueName: \"kubernetes.io/projected/da153901-c3b2-469c-944b-48209050c85c-kube-api-access-4k9gv\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.304857 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv5ps\" (UniqueName: \"kubernetes.io/projected/0eb390fd-b392-4ec8-990b-7ed540083355-kube-api-access-pv5ps\") on node \"crc\" DevicePath \"\""
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.641112 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-th6wm" event={"ID":"da153901-c3b2-469c-944b-48209050c85c","Type":"ContainerDied","Data":"488838a0772fb4c51d96e5f24b0e211852750ffa8a79b7a01caee9b2966e61c8"}
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.641458 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="488838a0772fb4c51d96e5f24b0e211852750ffa8a79b7a01caee9b2966e61c8"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.641126 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-th6wm"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.642417 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22" event={"ID":"0eb390fd-b392-4ec8-990b-7ed540083355","Type":"ContainerDied","Data":"141eba3cfe3eeeee01b0588d4106765c774176438b94cbdbcac40d95e23def4f"}
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.642461 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="141eba3cfe3eeeee01b0588d4106765c774176438b94cbdbcac40d95e23def4f"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.642637 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5e4d-account-create-update-xvr22"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.643776 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f7jd"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.643799 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f7jd" event={"ID":"64fa5872-7787-4029-b9ce-a33b115c5858","Type":"ContainerDied","Data":"dd97e355c8c63d8b403e9d5cf91b961ba975594e9795e9f1c8591f3ec0793069"}
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.643839 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd97e355c8c63d8b403e9d5cf91b961ba975594e9795e9f1c8591f3ec0793069"
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.645634 4902 generic.go:334] "Generic (PLEG): container finished" podID="fe71c826-cad9-405b-824e-f5a856927efe" containerID="dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db" exitCode=143
Dec 02 14:37:20 crc kubenswrapper[4902]: I1202 14:37:20.645769 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerDied","Data":"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db"}
Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.889403 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-488838a0772fb4c51d96e5f24b0e211852750ffa8a79b7a01caee9b2966e61c8": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-488838a0772fb4c51d96e5f24b0e211852750ffa8a79b7a01caee9b2966e61c8: no such file or directory
Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.889695 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-conmon-483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-conmon-483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98.scope: no such file or directory
Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.890936 4902 watcher.go:93] Error while processing event
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda153901_c3b2_469c_944b_48209050c85c.slice/crio-483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.891157 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-dd97e355c8c63d8b403e9d5cf91b961ba975594e9795e9f1c8591f3ec0793069": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-dd97e355c8c63d8b403e9d5cf91b961ba975594e9795e9f1c8591f3ec0793069: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.891366 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-141eba3cfe3eeeee01b0588d4106765c774176438b94cbdbcac40d95e23def4f": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-141eba3cfe3eeeee01b0588d4106765c774176438b94cbdbcac40d95e23def4f: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.892322 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-conmon-ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-conmon-ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.892630 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-conmon-b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-conmon-b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.892971 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fa5872_7787_4029_b9ce_a33b115c5858.slice/crio-ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893121 4902 watcher.go:93] Error while processing 
event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4c64af0_fa3b_4607_be3b_cda80bc8831c.slice/crio-conmon-08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4c64af0_fa3b_4607_be3b_cda80bc8831c.slice/crio-conmon-08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893213 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb390fd_b392_4ec8_990b_7ed540083355.slice/crio-b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893591 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ccd2d74_fe45_4bfe_9ac4_d525c23d4fbe.slice/crio-conmon-f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ccd2d74_fe45_4bfe_9ac4_d525c23d4fbe.slice/crio-conmon-f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893720 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4c64af0_fa3b_4607_be3b_cda80bc8831c.slice/crio-08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4c64af0_fa3b_4607_be3b_cda80bc8831c.slice/crio-08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893838 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bcd69e0_8303_46f6_bef1_37faec667798.slice/crio-conmon-552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bcd69e0_8303_46f6_bef1_37faec667798.slice/crio-conmon-552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.893937 4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bcd69e0_8303_46f6_bef1_37faec667798.slice/crio-552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bcd69e0_8303_46f6_bef1_37faec667798.slice/crio-552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f.scope: no such file or directory Dec 02 14:37:20 crc kubenswrapper[4902]: W1202 14:37:20.894038 
4902 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ccd2d74_fe45_4bfe_9ac4_d525c23d4fbe.slice/crio-f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ccd2d74_fe45_4bfe_9ac4_d525c23d4fbe.slice/crio-f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a.scope: no such file or directory Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.242994 4902 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod97eb051a-3187-447f-8bf6-71ae3d8f65d7"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod97eb051a-3187-447f-8bf6-71ae3d8f65d7] : Timed out while waiting for systemd to remove kubepods-besteffort-pod97eb051a_3187_447f_8bf6_71ae3d8f65d7.slice" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.243276 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod97eb051a-3187-447f-8bf6-71ae3d8f65d7] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod97eb051a-3187-447f-8bf6-71ae3d8f65d7] : Timed out while waiting for systemd to remove kubepods-besteffort-pod97eb051a_3187_447f_8bf6_71ae3d8f65d7.slice" pod="openstack/ceilometer-0" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.256937 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-42af-account-create-update-dg87s" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.327925 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts\") pod \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.327977 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbxm7\" (UniqueName: \"kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7\") pod \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\" (UID: \"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.331347 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" (UID: "5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.333866 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7" (OuterVolumeSpecName: "kube-api-access-bbxm7") pod "5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" (UID: "5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe"). InnerVolumeSpecName "kube-api-access-bbxm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.365261 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4a51-account-create-update-4fmkl" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.366607 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-75k7n" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.432613 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.432646 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbxm7\" (UniqueName: \"kubernetes.io/projected/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe-kube-api-access-bbxm7\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.533263 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jzff\" (UniqueName: \"kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff\") pod \"2bcd69e0-8303-46f6-bef1-37faec667798\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.533382 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts\") pod \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.533410 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts\") pod \"2bcd69e0-8303-46f6-bef1-37faec667798\" (UID: \"2bcd69e0-8303-46f6-bef1-37faec667798\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.533576 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgdmk\" (UniqueName: \"kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk\") pod \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\" (UID: \"e4c64af0-fa3b-4607-be3b-cda80bc8831c\") " Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.534051 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e4c64af0-fa3b-4607-be3b-cda80bc8831c" (UID: "e4c64af0-fa3b-4607-be3b-cda80bc8831c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.534105 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2bcd69e0-8303-46f6-bef1-37faec667798" (UID: "2bcd69e0-8303-46f6-bef1-37faec667798"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.541779 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff" (OuterVolumeSpecName: "kube-api-access-2jzff") pod "2bcd69e0-8303-46f6-bef1-37faec667798" (UID: "2bcd69e0-8303-46f6-bef1-37faec667798"). 
InnerVolumeSpecName "kube-api-access-2jzff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.542690 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk" (OuterVolumeSpecName: "kube-api-access-sgdmk") pod "e4c64af0-fa3b-4607-be3b-cda80bc8831c" (UID: "e4c64af0-fa3b-4607-be3b-cda80bc8831c"). InnerVolumeSpecName "kube-api-access-sgdmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.635415 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jzff\" (UniqueName: \"kubernetes.io/projected/2bcd69e0-8303-46f6-bef1-37faec667798-kube-api-access-2jzff\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.635451 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4c64af0-fa3b-4607-be3b-cda80bc8831c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.635460 4902 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bcd69e0-8303-46f6-bef1-37faec667798-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.635468 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgdmk\" (UniqueName: \"kubernetes.io/projected/e4c64af0-fa3b-4607-be3b-cda80bc8831c-kube-api-access-sgdmk\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.657253 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-42af-account-create-update-dg87s" event={"ID":"5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe","Type":"ContainerDied","Data":"fe2adabc2558bb834af040915f857f3df9099e24572730fad555d435d6d3a070"} Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.657291 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe2adabc2558bb834af040915f857f3df9099e24572730fad555d435d6d3a070" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.657290 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-42af-account-create-update-dg87s" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.658728 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4a51-account-create-update-4fmkl" event={"ID":"2bcd69e0-8303-46f6-bef1-37faec667798","Type":"ContainerDied","Data":"d1499eb1dc08d49f59921c19e73c40139c1af7dd7e2059454a6c8cff6d7fc64f"} Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.658754 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1499eb1dc08d49f59921c19e73c40139c1af7dd7e2059454a6c8cff6d7fc64f" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.658785 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4a51-account-create-update-4fmkl" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.661296 4902 generic.go:334] "Generic (PLEG): container finished" podID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerID="462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" exitCode=137 Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.661360 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerDied","Data":"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924"} Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.661401 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerStarted","Data":"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f"} Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.661442 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon-log" containerID="cri-o://b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" gracePeriod=30 Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.661525 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-85bf5878d8-bn7cr" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" containerID="cri-o://27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" gracePeriod=30 Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.668137 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.668142 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-75k7n" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.668153 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-75k7n" event={"ID":"e4c64af0-fa3b-4607-be3b-cda80bc8831c","Type":"ContainerDied","Data":"ec8f323e725b528ecb44e0aaddfd2a8da263cea053f59a03836356cdfaeede81"} Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.669064 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec8f323e725b528ecb44e0aaddfd2a8da263cea053f59a03836356cdfaeede81" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.734622 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.745280 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766185 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766550 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da153901-c3b2-469c-944b-48209050c85c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766578 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="da153901-c3b2-469c-944b-48209050c85c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766589 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766596 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766620 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64fa5872-7787-4029-b9ce-a33b115c5858" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766626 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="64fa5872-7787-4029-b9ce-a33b115c5858" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766643 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb390fd-b392-4ec8-990b-7ed540083355" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766649 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb390fd-b392-4ec8-990b-7ed540083355" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766659 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4c64af0-fa3b-4607-be3b-cda80bc8831c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766666 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4c64af0-fa3b-4607-be3b-cda80bc8831c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: E1202 14:37:21.766674 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bcd69e0-8303-46f6-bef1-37faec667798" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766680 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bcd69e0-8303-46f6-bef1-37faec667798" containerName="mariadb-account-create-update" Dec 
02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766849 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766863 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="64fa5872-7787-4029-b9ce-a33b115c5858" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766871 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb390fd-b392-4ec8-990b-7ed540083355" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766883 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bcd69e0-8303-46f6-bef1-37faec667798" containerName="mariadb-account-create-update" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766896 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4c64af0-fa3b-4607-be3b-cda80bc8831c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.766906 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="da153901-c3b2-469c-944b-48209050c85c" containerName="mariadb-database-create" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.768519 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.770890 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.771667 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.826803 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838337 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838397 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838585 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838655 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn5tz\" (UniqueName: \"kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838723 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838742 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.838858 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940347 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940438 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940461 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn5tz\" (UniqueName: \"kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940491 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940504 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.940552 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.941067 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.941091 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.945528 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.945967 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.947282 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.949260 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:21 crc kubenswrapper[4902]: I1202 14:37:21.959152 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn5tz\" (UniqueName: \"kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz\") pod \"ceilometer-0\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " pod="openstack/ceilometer-0" Dec 02 14:37:22 crc kubenswrapper[4902]: I1202 14:37:22.095065 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:22 crc kubenswrapper[4902]: W1202 14:37:22.567147 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2efdd190_8701_4e86_9e8e_57a2f848e23d.slice/crio-51759ed7e262a857e6cdd9e611be584a46a44ec11886800e57c5898e9ac362cc WatchSource:0}: Error finding container 51759ed7e262a857e6cdd9e611be584a46a44ec11886800e57c5898e9ac362cc: Status 404 returned error can't find the container with id 51759ed7e262a857e6cdd9e611be584a46a44ec11886800e57c5898e9ac362cc Dec 02 14:37:22 crc kubenswrapper[4902]: I1202 14:37:22.572696 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:22 crc kubenswrapper[4902]: I1202 14:37:22.678348 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerStarted","Data":"51759ed7e262a857e6cdd9e611be584a46a44ec11886800e57c5898e9ac362cc"} Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.124102 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97eb051a-3187-447f-8bf6-71ae3d8f65d7" path="/var/lib/kubelet/pods/97eb051a-3187-447f-8bf6-71ae3d8f65d7/volumes" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.470407 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.470959 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-log" containerID="cri-o://7f9f473ac07c4df89e1bdc2cec14a94204e2ccc14412f2c05235b343422f9e59" gracePeriod=30 Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.471335 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-httpd" containerID="cri-o://b1932c4ae1b856f2ee864e26a86aec06defff0bd97dcf0b546274265c8f87a61" gracePeriod=30 Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.625754 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.695239 4902 generic.go:334] "Generic (PLEG): container finished" podID="fe71c826-cad9-405b-824e-f5a856927efe" containerID="028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075" exitCode=0 Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.695308 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerDied","Data":"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075"} Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.695335 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fe71c826-cad9-405b-824e-f5a856927efe","Type":"ContainerDied","Data":"fcedf88a9e1c902761c47499b48925d6f380d576584c3d86b975d94cd7f9cfa5"} Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.695352 4902 scope.go:117] "RemoveContainer" containerID="028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.695470 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.698426 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerStarted","Data":"ecad0b367efcd102591f32d8fd2637d1f5819f5bc05ade8dfb4a3128ae5dc910"} Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.708645 4902 generic.go:334] "Generic (PLEG): container finished" podID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerID="7f9f473ac07c4df89e1bdc2cec14a94204e2ccc14412f2c05235b343422f9e59" exitCode=143 Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.708971 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerDied","Data":"7f9f473ac07c4df89e1bdc2cec14a94204e2ccc14412f2c05235b343422f9e59"} Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.736298 4902 scope.go:117] "RemoveContainer" containerID="dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.775575 4902 scope.go:117] "RemoveContainer" containerID="028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075" Dec 02 14:37:23 crc kubenswrapper[4902]: E1202 14:37:23.776096 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075\": container with ID starting with 028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075 not found: ID does not exist" containerID="028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.776140 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075"} err="failed to get container status \"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075\": rpc error: code = NotFound desc = could not find container \"028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075\": container with ID starting with 028522c4ff2f102fae123d041522c0e99ff16155b56b27fcab42d83780cb6075 not found: ID does not exist" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.776168 4902 scope.go:117] "RemoveContainer" containerID="dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db" Dec 02 14:37:23 crc kubenswrapper[4902]: E1202 14:37:23.776657 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db\": container with ID starting with dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db not found: ID does not exist" containerID="dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.776707 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db"} err="failed to get container status \"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db\": rpc error: code = NotFound desc = could not find container \"dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db\": container with ID starting with 
dd5511618452c435125a4f28118386ece9aa4f051f72ee4098c53439368265db not found: ID does not exist" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779294 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779386 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779412 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779539 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l28m9\" (UniqueName: \"kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779655 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779778 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779775 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779801 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.779825 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs\") pod \"fe71c826-cad9-405b-824e-f5a856927efe\" (UID: \"fe71c826-cad9-405b-824e-f5a856927efe\") " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.780351 4902 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.780595 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs" (OuterVolumeSpecName: "logs") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.784703 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.785023 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts" (OuterVolumeSpecName: "scripts") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.789691 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9" (OuterVolumeSpecName: "kube-api-access-l28m9") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "kube-api-access-l28m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.811910 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.844715 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.864402 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data" (OuterVolumeSpecName: "config-data") pod "fe71c826-cad9-405b-824e-f5a856927efe" (UID: "fe71c826-cad9-405b-824e-f5a856927efe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881452 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l28m9\" (UniqueName: \"kubernetes.io/projected/fe71c826-cad9-405b-824e-f5a856927efe-kube-api-access-l28m9\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881483 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881512 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881523 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881533 4902 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881541 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe71c826-cad9-405b-824e-f5a856927efe-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.881549 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe71c826-cad9-405b-824e-f5a856927efe-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.901680 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 02 14:37:23 crc kubenswrapper[4902]: I1202 14:37:23.983212 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.026487 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.034122 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.049491 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:37:24 crc kubenswrapper[4902]: E1202 14:37:24.050091 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-log" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.050162 4902 
state_mem.go:107] "Deleted CPUSet assignment" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-log" Dec 02 14:37:24 crc kubenswrapper[4902]: E1202 14:37:24.050251 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-httpd" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.050304 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-httpd" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.050527 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-log" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.050605 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe71c826-cad9-405b-824e-f5a856927efe" containerName="glance-httpd" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.051605 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.054267 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.056836 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.073714 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.186833 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.187127 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbfkh\" (UniqueName: \"kubernetes.io/projected/3d0b949e-6c1b-4c5c-955e-53e6ab278555-kube-api-access-mbfkh\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.187230 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.187417 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-scripts\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.187617 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-logs\") pod \"glance-default-external-api-0\" 
(UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.187693 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.188008 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.188187 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.290383 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.290752 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-scripts\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.290977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-logs\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.291145 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.291366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.291518 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " 
pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.291699 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.291953 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.292129 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbfkh\" (UniqueName: \"kubernetes.io/projected/3d0b949e-6c1b-4c5c-955e-53e6ab278555-kube-api-access-mbfkh\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.292322 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d0b949e-6c1b-4c5c-955e-53e6ab278555-logs\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.292008 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.300120 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-scripts\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.301764 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-config-data\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.302619 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.303057 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d0b949e-6c1b-4c5c-955e-53e6ab278555-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 
14:37:24.320390 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbfkh\" (UniqueName: \"kubernetes.io/projected/3d0b949e-6c1b-4c5c-955e-53e6ab278555-kube-api-access-mbfkh\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.323979 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"3d0b949e-6c1b-4c5c-955e-53e6ab278555\") " pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.420278 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.732200 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerStarted","Data":"b775febc42e2ab041bbb08eb9e3f0db75897538f72b7827a4ccf4faa3dfe73c6"} Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.875680 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2tflw"] Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.876978 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.890311 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2tflw"] Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.892059 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zfd96" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.892236 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.894009 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 14:37:24 crc kubenswrapper[4902]: I1202 14:37:24.959855 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.005764 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.005854 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn2d4\" (UniqueName: \"kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.006128 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: 
\"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.006362 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.057325 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.108273 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn2d4\" (UniqueName: \"kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.108375 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.108462 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.108518 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.119640 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.122918 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe71c826-cad9-405b-824e-f5a856927efe" path="/var/lib/kubelet/pods/fe71c826-cad9-405b-824e-f5a856927efe/volumes" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.130631 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn2d4\" (UniqueName: \"kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.131922 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data\") 
pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.132475 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2tflw\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.246482 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.756635 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d0b949e-6c1b-4c5c-955e-53e6ab278555","Type":"ContainerStarted","Data":"0ee2977aa88fa049c108d32a6d49d8171bf645bc963043520b255433d8ea2018"} Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.782862 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerStarted","Data":"7a2290d06fedc19864e0c7553aa6c5c7f0bf13424f7b1c4020069f134c52fa4c"} Dec 02 14:37:25 crc kubenswrapper[4902]: I1202 14:37:25.979661 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2tflw"] Dec 02 14:37:26 crc kubenswrapper[4902]: W1202 14:37:26.009895 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f41756d_ba98_4fe9_9b76_7924f7879577.slice/crio-4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc WatchSource:0}: Error finding container 4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc: Status 404 returned error can't find the container with id 4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc Dec 02 14:37:26 crc kubenswrapper[4902]: I1202 14:37:26.795813 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d0b949e-6c1b-4c5c-955e-53e6ab278555","Type":"ContainerStarted","Data":"2d614efbbb819a97cef8c00ef01cc278cd097d71e9c5210a0f399c2e432747ff"} Dec 02 14:37:26 crc kubenswrapper[4902]: I1202 14:37:26.796196 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3d0b949e-6c1b-4c5c-955e-53e6ab278555","Type":"ContainerStarted","Data":"56e56f46b220ea50bf36e49654b84dc2592b2d2cc71ab61bcdc4b4202406f83a"} Dec 02 14:37:26 crc kubenswrapper[4902]: I1202 14:37:26.798017 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2tflw" event={"ID":"5f41756d-ba98-4fe9-9b76-7924f7879577","Type":"ContainerStarted","Data":"4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc"} Dec 02 14:37:26 crc kubenswrapper[4902]: I1202 14:37:26.819167 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.819151699 podStartE2EDuration="2.819151699s" podCreationTimestamp="2025-12-02 14:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:26.813981393 +0000 UTC m=+1278.005290102" watchObservedRunningTime="2025-12-02 14:37:26.819151699 
+0000 UTC m=+1278.010460408" Dec 02 14:37:27 crc kubenswrapper[4902]: I1202 14:37:27.339414 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Dec 02 14:37:27 crc kubenswrapper[4902]: I1202 14:37:27.339433 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Dec 02 14:37:27 crc kubenswrapper[4902]: I1202 14:37:27.835519 4902 generic.go:334] "Generic (PLEG): container finished" podID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerID="b1932c4ae1b856f2ee864e26a86aec06defff0bd97dcf0b546274265c8f87a61" exitCode=0 Dec 02 14:37:27 crc kubenswrapper[4902]: I1202 14:37:27.835628 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerDied","Data":"b1932c4ae1b856f2ee864e26a86aec06defff0bd97dcf0b546274265c8f87a61"} Dec 02 14:37:28 crc kubenswrapper[4902]: I1202 14:37:28.001447 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 02 14:37:28 crc kubenswrapper[4902]: I1202 14:37:28.036257 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 02 14:37:28 crc kubenswrapper[4902]: I1202 14:37:28.846862 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 02 14:37:28 crc kubenswrapper[4902]: I1202 14:37:28.900152 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.091836 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226388 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226670 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226691 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226713 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcgtr\" (UniqueName: \"kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226767 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226812 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.226966 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.227004 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs\") pod \"1d55b635-d5b6-4a77-9019-f8741adc7a93\" (UID: \"1d55b635-d5b6-4a77-9019-f8741adc7a93\") " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.234732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.234752 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs" (OuterVolumeSpecName: "logs") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.235673 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts" (OuterVolumeSpecName: "scripts") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.242617 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr" (OuterVolumeSpecName: "kube-api-access-kcgtr") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "kube-api-access-kcgtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.247757 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.280833 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.295692 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data" (OuterVolumeSpecName: "config-data") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.299243 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1d55b635-d5b6-4a77-9019-f8741adc7a93" (UID: "1d55b635-d5b6-4a77-9019-f8741adc7a93"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329543 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329582 4902 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329593 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329606 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d55b635-d5b6-4a77-9019-f8741adc7a93-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329615 4902 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329641 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329650 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d55b635-d5b6-4a77-9019-f8741adc7a93-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.329658 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcgtr\" (UniqueName: \"kubernetes.io/projected/1d55b635-d5b6-4a77-9019-f8741adc7a93-kube-api-access-kcgtr\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.350668 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.431056 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.861218 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerStarted","Data":"57074325c499d24a2e9b3ebf4a929431c34afe032b59c56a16adb4c8b514161b"} Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.861624 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-central-agent" containerID="cri-o://ecad0b367efcd102591f32d8fd2637d1f5819f5bc05ade8dfb4a3128ae5dc910" gracePeriod=30 Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.862433 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.862660 4902 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="proxy-httpd" containerID="cri-o://57074325c499d24a2e9b3ebf4a929431c34afe032b59c56a16adb4c8b514161b" gracePeriod=30
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.862847 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-notification-agent" containerID="cri-o://b775febc42e2ab041bbb08eb9e3f0db75897538f72b7827a4ccf4faa3dfe73c6" gracePeriod=30
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.862862 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="sg-core" containerID="cri-o://7a2290d06fedc19864e0c7553aa6c5c7f0bf13424f7b1c4020069f134c52fa4c" gracePeriod=30
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.875543 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.876606 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1d55b635-d5b6-4a77-9019-f8741adc7a93","Type":"ContainerDied","Data":"0399ff36acccc9ccbdf08028c424ba627fd90eeba2bfd0280a631d3e80505d55"}
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.876660 4902 scope.go:117] "RemoveContainer" containerID="b1932c4ae1b856f2ee864e26a86aec06defff0bd97dcf0b546274265c8f87a61"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.889380 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.928317526 podStartE2EDuration="8.889364604s" podCreationTimestamp="2025-12-02 14:37:21 +0000 UTC" firstStartedPulling="2025-12-02 14:37:22.569797894 +0000 UTC m=+1273.761106603" lastFinishedPulling="2025-12-02 14:37:28.530844972 +0000 UTC m=+1279.722153681" observedRunningTime="2025-12-02 14:37:29.886734691 +0000 UTC m=+1281.078043400" watchObservedRunningTime="2025-12-02 14:37:29.889364604 +0000 UTC m=+1281.080673313"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.919557 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.920154 4902 scope.go:117] "RemoveContainer" containerID="7f9f473ac07c4df89e1bdc2cec14a94204e2ccc14412f2c05235b343422f9e59"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.932774 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.975641 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 14:37:29 crc kubenswrapper[4902]: E1202 14:37:29.988116 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-httpd"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.988158 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-httpd"
Dec 02 14:37:29 crc kubenswrapper[4902]: E1202 14:37:29.988190 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-log"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.988197 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-log"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.988796 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-log"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.988817 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" containerName="glance-httpd"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.995643 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.998439 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 02 14:37:29 crc kubenswrapper[4902]: I1202 14:37:29.998746 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.042529 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050458 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050523 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hslfh\" (UniqueName: \"kubernetes.io/projected/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-kube-api-access-hslfh\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050550 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050641 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050680 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050704 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050725 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.050783 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152454 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152516 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hslfh\" (UniqueName: \"kubernetes.io/projected/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-kube-api-access-hslfh\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152551 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152691 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152717 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152750 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152781 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.152843 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.153797 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.154115 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.154251 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.157260 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.158013 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.164105 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.164433 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.186860 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.191511 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hslfh\" (UniqueName: \"kubernetes.io/projected/9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e-kube-api-access-hslfh\") pod \"glance-default-internal-api-0\" (UID: \"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e\") " pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.355176 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.815931 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-85bf5878d8-bn7cr"
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898125 4902 generic.go:334] "Generic (PLEG): container finished" podID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerID="57074325c499d24a2e9b3ebf4a929431c34afe032b59c56a16adb4c8b514161b" exitCode=0
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898156 4902 generic.go:334] "Generic (PLEG): container finished" podID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerID="7a2290d06fedc19864e0c7553aa6c5c7f0bf13424f7b1c4020069f134c52fa4c" exitCode=2
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898163 4902 generic.go:334] "Generic (PLEG): container finished" podID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerID="b775febc42e2ab041bbb08eb9e3f0db75897538f72b7827a4ccf4faa3dfe73c6" exitCode=0
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898202 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerDied","Data":"57074325c499d24a2e9b3ebf4a929431c34afe032b59c56a16adb4c8b514161b"}
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898229 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerDied","Data":"7a2290d06fedc19864e0c7553aa6c5c7f0bf13424f7b1c4020069f134c52fa4c"}
Dec 02 14:37:30 crc kubenswrapper[4902]: I1202 14:37:30.898240 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerDied","Data":"b775febc42e2ab041bbb08eb9e3f0db75897538f72b7827a4ccf4faa3dfe73c6"}
Dec 02 14:37:31 crc kubenswrapper[4902]: I1202 14:37:31.124180 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d55b635-d5b6-4a77-9019-f8741adc7a93" path="/var/lib/kubelet/pods/1d55b635-d5b6-4a77-9019-f8741adc7a93/volumes"
Dec 02 14:37:31 crc kubenswrapper[4902]: I1202 14:37:31.312700 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 14:37:31 crc kubenswrapper[4902]: I1202 14:37:31.917080 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e","Type":"ContainerStarted","Data":"406d1c1ecbc61ef0ad48cdcd3d4681a8eae570d7db0c076b90ca3988e0f7b75d"}
Dec 02 14:37:32 crc kubenswrapper[4902]: I1202 14:37:32.928117 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e","Type":"ContainerStarted","Data":"63654df093b78bc5888863ad37267217d0c0bb2955e09c6b720014a6cccd48b0"}
Dec 02 14:37:32 crc kubenswrapper[4902]: I1202 14:37:32.928373 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0"
event={"ID":"9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e","Type":"ContainerStarted","Data":"59203d578390f852287ab482a4a2591b202ac5b35eec966db0a89f8a28312a64"} Dec 02 14:37:33 crc kubenswrapper[4902]: I1202 14:37:33.001111 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.001091958 podStartE2EDuration="4.001091958s" podCreationTimestamp="2025-12-02 14:37:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:32.94606321 +0000 UTC m=+1284.137371919" watchObservedRunningTime="2025-12-02 14:37:33.001091958 +0000 UTC m=+1284.192400667" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.420464 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.420840 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.497475 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.509295 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.732102 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.732395 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.732436 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.733525 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.733617 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f" gracePeriod=600 Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.953054 4902 generic.go:334] "Generic (PLEG): container finished" podID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerID="ecad0b367efcd102591f32d8fd2637d1f5819f5bc05ade8dfb4a3128ae5dc910" exitCode=0 Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 
14:37:34.953113 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerDied","Data":"ecad0b367efcd102591f32d8fd2637d1f5819f5bc05ade8dfb4a3128ae5dc910"} Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.956356 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f" exitCode=0 Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.956410 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f"} Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.956474 4902 scope.go:117] "RemoveContainer" containerID="fa3b2259dc7978de3e2c42de1d4c0b7a4d6a7a518fcd2965c20c0c6d1af8af33" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.956711 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 14:37:34 crc kubenswrapper[4902]: I1202 14:37:34.956739 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 14:37:37 crc kubenswrapper[4902]: I1202 14:37:37.319607 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 14:37:37 crc kubenswrapper[4902]: I1202 14:37:37.320002 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:37:37 crc kubenswrapper[4902]: I1202 14:37:37.421062 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 14:37:39 crc kubenswrapper[4902]: I1202 14:37:39.959067 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.034489 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.034815 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.034902 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.034931 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.034961 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn5tz\" (UniqueName: \"kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.035003 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.035068 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml\") pod \"2efdd190-8701-4e86-9e8e-57a2f848e23d\" (UID: \"2efdd190-8701-4e86-9e8e-57a2f848e23d\") " Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.036083 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.037675 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.037852 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2efdd190-8701-4e86-9e8e-57a2f848e23d","Type":"ContainerDied","Data":"51759ed7e262a857e6cdd9e611be584a46a44ec11886800e57c5898e9ac362cc"} Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.037903 4902 scope.go:117] "RemoveContainer" containerID="57074325c499d24a2e9b3ebf4a929431c34afe032b59c56a16adb4c8b514161b" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.038023 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.039508 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz" (OuterVolumeSpecName: "kube-api-access-sn5tz") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "kube-api-access-sn5tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.042530 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts" (OuterVolumeSpecName: "scripts") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.073883 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.094384 4902 scope.go:117] "RemoveContainer" containerID="7a2290d06fedc19864e0c7553aa6c5c7f0bf13424f7b1c4020069f134c52fa4c" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.129133 4902 scope.go:117] "RemoveContainer" containerID="b775febc42e2ab041bbb08eb9e3f0db75897538f72b7827a4ccf4faa3dfe73c6" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.131488 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137280 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137519 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137624 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn5tz\" (UniqueName: \"kubernetes.io/projected/2efdd190-8701-4e86-9e8e-57a2f848e23d-kube-api-access-sn5tz\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137741 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2efdd190-8701-4e86-9e8e-57a2f848e23d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137827 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.137910 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.166315 4902 scope.go:117] "RemoveContainer" containerID="ecad0b367efcd102591f32d8fd2637d1f5819f5bc05ade8dfb4a3128ae5dc910" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.170319 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data" (OuterVolumeSpecName: "config-data") pod "2efdd190-8701-4e86-9e8e-57a2f848e23d" (UID: "2efdd190-8701-4e86-9e8e-57a2f848e23d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.240127 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efdd190-8701-4e86-9e8e-57a2f848e23d-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.358267 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.358592 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.418400 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.424964 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.436626 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444165 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:40 crc kubenswrapper[4902]: E1202 14:37:40.444553 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="proxy-httpd" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444628 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="proxy-httpd" Dec 02 14:37:40 crc kubenswrapper[4902]: E1202 14:37:40.444646 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-central-agent" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444653 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-central-agent" Dec 02 14:37:40 crc kubenswrapper[4902]: E1202 14:37:40.444672 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-notification-agent" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444679 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-notification-agent" Dec 02 14:37:40 crc kubenswrapper[4902]: E1202 14:37:40.444706 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="sg-core" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444712 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="sg-core" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.444986 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="sg-core" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.445014 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-notification-agent" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.445031 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="ceilometer-central-agent" Dec 02 
14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.445038 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" containerName="proxy-httpd" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.449418 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.449862 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.456933 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.458431 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.486604 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545517 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545555 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545596 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545621 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545646 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.545742 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwvkn\" (UniqueName: \"kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn\") pod \"ceilometer-0\" 
(UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649105 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwvkn\" (UniqueName: \"kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649189 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649208 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649235 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649262 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649287 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649311 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649743 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.649827 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.654303 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " 
pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.654488 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.656081 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.661944 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.679360 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwvkn\" (UniqueName: \"kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn\") pod \"ceilometer-0\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " pod="openstack/ceilometer-0" Dec 02 14:37:40 crc kubenswrapper[4902]: I1202 14:37:40.770334 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.065862 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2tflw" event={"ID":"5f41756d-ba98-4fe9-9b76-7924f7879577","Type":"ContainerStarted","Data":"b68210ce3c87dd829683b4c920f83244c49db2ba9db82ede14962ef90911d1e1"} Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.073228 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5"} Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.073979 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.074074 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.089603 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-2tflw" podStartSLOduration=3.099508933 podStartE2EDuration="17.08958627s" podCreationTimestamp="2025-12-02 14:37:24 +0000 UTC" firstStartedPulling="2025-12-02 14:37:26.016330653 +0000 UTC m=+1277.207639362" lastFinishedPulling="2025-12-02 14:37:40.00640799 +0000 UTC m=+1291.197716699" observedRunningTime="2025-12-02 14:37:41.08389386 +0000 UTC m=+1292.275202569" watchObservedRunningTime="2025-12-02 14:37:41.08958627 +0000 UTC m=+1292.280894979" Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.128102 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2efdd190-8701-4e86-9e8e-57a2f848e23d" path="/var/lib/kubelet/pods/2efdd190-8701-4e86-9e8e-57a2f848e23d/volumes" Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 
14:37:41.287053 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:37:41 crc kubenswrapper[4902]: I1202 14:37:41.290473 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:42 crc kubenswrapper[4902]: I1202 14:37:42.081301 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerStarted","Data":"e7659abebb5595683818119c2e4dbc6e8c698db91172de3a3a6ac3bcfc246864"} Dec 02 14:37:42 crc kubenswrapper[4902]: I1202 14:37:42.335097 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:43 crc kubenswrapper[4902]: I1202 14:37:43.092874 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerStarted","Data":"efe20cbb708961fe3f2f75ed91a5ba15173cd1992bbd46283d578f91ca09876c"} Dec 02 14:37:43 crc kubenswrapper[4902]: I1202 14:37:43.093125 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerStarted","Data":"456029b7f9633e8a71195abb35b3fa32afee39251dcceaa095c311ee0d2ed089"} Dec 02 14:37:43 crc kubenswrapper[4902]: I1202 14:37:43.155400 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:43 crc kubenswrapper[4902]: I1202 14:37:43.155541 4902 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 14:37:43 crc kubenswrapper[4902]: I1202 14:37:43.895256 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 14:37:44 crc kubenswrapper[4902]: I1202 14:37:44.104259 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerStarted","Data":"21463af6555fc69a8ed5a620e3b721eba38b3f9250e9dc876da0c149a256b156"} Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.125749 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerStarted","Data":"b28908441a8e10fee1781af50f642f90b1b9ce5702adfa1193439e63a0e59c33"} Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.126268 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.125922 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-central-agent" containerID="cri-o://456029b7f9633e8a71195abb35b3fa32afee39251dcceaa095c311ee0d2ed089" gracePeriod=30 Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.126040 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-notification-agent" containerID="cri-o://efe20cbb708961fe3f2f75ed91a5ba15173cd1992bbd46283d578f91ca09876c" gracePeriod=30 Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.126074 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="sg-core" 
containerID="cri-o://21463af6555fc69a8ed5a620e3b721eba38b3f9250e9dc876da0c149a256b156" gracePeriod=30 Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.126010 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="proxy-httpd" containerID="cri-o://b28908441a8e10fee1781af50f642f90b1b9ce5702adfa1193439e63a0e59c33" gracePeriod=30 Dec 02 14:37:46 crc kubenswrapper[4902]: I1202 14:37:46.161498 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.896458359 podStartE2EDuration="6.161478104s" podCreationTimestamp="2025-12-02 14:37:40 +0000 UTC" firstStartedPulling="2025-12-02 14:37:41.286790396 +0000 UTC m=+1292.478099105" lastFinishedPulling="2025-12-02 14:37:45.551810141 +0000 UTC m=+1296.743118850" observedRunningTime="2025-12-02 14:37:46.151940896 +0000 UTC m=+1297.343249625" watchObservedRunningTime="2025-12-02 14:37:46.161478104 +0000 UTC m=+1297.352786813" Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.138763 4902 generic.go:334] "Generic (PLEG): container finished" podID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerID="b28908441a8e10fee1781af50f642f90b1b9ce5702adfa1193439e63a0e59c33" exitCode=0 Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.139134 4902 generic.go:334] "Generic (PLEG): container finished" podID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerID="21463af6555fc69a8ed5a620e3b721eba38b3f9250e9dc876da0c149a256b156" exitCode=2 Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.139148 4902 generic.go:334] "Generic (PLEG): container finished" podID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerID="efe20cbb708961fe3f2f75ed91a5ba15173cd1992bbd46283d578f91ca09876c" exitCode=0 Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.138853 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerDied","Data":"b28908441a8e10fee1781af50f642f90b1b9ce5702adfa1193439e63a0e59c33"} Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.139188 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerDied","Data":"21463af6555fc69a8ed5a620e3b721eba38b3f9250e9dc876da0c149a256b156"} Dec 02 14:37:47 crc kubenswrapper[4902]: I1202 14:37:47.139206 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerDied","Data":"efe20cbb708961fe3f2f75ed91a5ba15173cd1992bbd46283d578f91ca09876c"} Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.148702 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.193531 4902 generic.go:334] "Generic (PLEG): container finished" podID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerID="27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" exitCode=137 Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194099 4902 generic.go:334] "Generic (PLEG): container finished" podID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerID="b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" exitCode=137 Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194137 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerDied","Data":"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f"} Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194194 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerDied","Data":"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8"} Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194209 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85bf5878d8-bn7cr" event={"ID":"6eab04a1-6320-449e-9f31-600aa46a57b0","Type":"ContainerDied","Data":"6eab603030d2831bb48c4d8a43b8ef55e3192edcf14c9d5797183544edbbf777"} Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194233 4902 scope.go:117] "RemoveContainer" containerID="27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.194548 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85bf5878d8-bn7cr" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292004 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292197 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292227 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6jdx\" (UniqueName: \"kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292279 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292331 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292347 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292393 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle\") pod \"6eab04a1-6320-449e-9f31-600aa46a57b0\" (UID: \"6eab04a1-6320-449e-9f31-600aa46a57b0\") " Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292524 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs" (OuterVolumeSpecName: "logs") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.292932 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6eab04a1-6320-449e-9f31-600aa46a57b0-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.318138 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.318144 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx" (OuterVolumeSpecName: "kube-api-access-v6jdx") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "kube-api-access-v6jdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.323636 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.325254 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts" (OuterVolumeSpecName: "scripts") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.326855 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data" (OuterVolumeSpecName: "config-data") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.347847 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "6eab04a1-6320-449e-9f31-600aa46a57b0" (UID: "6eab04a1-6320-449e-9f31-600aa46a57b0"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.366941 4902 scope.go:117] "RemoveContainer" containerID="462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394644 4902 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394687 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6jdx\" (UniqueName: \"kubernetes.io/projected/6eab04a1-6320-449e-9f31-600aa46a57b0-kube-api-access-v6jdx\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394704 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394714 4902 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394727 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6eab04a1-6320-449e-9f31-600aa46a57b0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.394738 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eab04a1-6320-449e-9f31-600aa46a57b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.547939 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"] Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.556272 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-85bf5878d8-bn7cr"] Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.558502 4902 scope.go:117] "RemoveContainer" containerID="b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.591234 4902 scope.go:117] "RemoveContainer" containerID="27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" Dec 02 14:37:52 crc kubenswrapper[4902]: E1202 14:37:52.591861 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f\": container with ID starting with 27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f not found: ID does not exist" containerID="27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.591927 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f"} err="failed to get container status \"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f\": rpc error: code = NotFound desc = could not find container \"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f\": container with ID starting with 27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f 
not found: ID does not exist" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.591978 4902 scope.go:117] "RemoveContainer" containerID="462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" Dec 02 14:37:52 crc kubenswrapper[4902]: E1202 14:37:52.592543 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924\": container with ID starting with 462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924 not found: ID does not exist" containerID="462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.592603 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924"} err="failed to get container status \"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924\": rpc error: code = NotFound desc = could not find container \"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924\": container with ID starting with 462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924 not found: ID does not exist" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.592625 4902 scope.go:117] "RemoveContainer" containerID="b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" Dec 02 14:37:52 crc kubenswrapper[4902]: E1202 14:37:52.593719 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8\": container with ID starting with b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8 not found: ID does not exist" containerID="b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.593772 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8"} err="failed to get container status \"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8\": rpc error: code = NotFound desc = could not find container \"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8\": container with ID starting with b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8 not found: ID does not exist" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.593807 4902 scope.go:117] "RemoveContainer" containerID="27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.594351 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f"} err="failed to get container status \"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f\": rpc error: code = NotFound desc = could not find container \"27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f\": container with ID starting with 27be99e2ef0cf4470e888efc3afa8a290f24c7d9369367e42449c1525591147f not found: ID does not exist" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.594390 4902 scope.go:117] "RemoveContainer" containerID="462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.595872 4902 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924"} err="failed to get container status \"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924\": rpc error: code = NotFound desc = could not find container \"462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924\": container with ID starting with 462f4ebe2d05a218759adaf7e488802bc7a0e2e9fe3774965a3b81d848c7d924 not found: ID does not exist" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.595924 4902 scope.go:117] "RemoveContainer" containerID="b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8" Dec 02 14:37:52 crc kubenswrapper[4902]: I1202 14:37:52.596929 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8"} err="failed to get container status \"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8\": rpc error: code = NotFound desc = could not find container \"b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8\": container with ID starting with b73aa17785aa91e050ea94f4b6ae7e09eadbcbe272f04ff468f6f79dc49ce1e8 not found: ID does not exist" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.118637 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" path="/var/lib/kubelet/pods/6eab04a1-6320-449e-9f31-600aa46a57b0/volumes" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.212630 4902 generic.go:334] "Generic (PLEG): container finished" podID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerID="456029b7f9633e8a71195abb35b3fa32afee39251dcceaa095c311ee0d2ed089" exitCode=0 Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.213483 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerDied","Data":"456029b7f9633e8a71195abb35b3fa32afee39251dcceaa095c311ee0d2ed089"} Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.530143 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.618744 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwvkn\" (UniqueName: \"kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.618845 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.618904 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.618967 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.619002 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.619069 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.619127 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd\") pod \"c9b07f0e-014d-4799-9d8f-43b273a991a4\" (UID: \"c9b07f0e-014d-4799-9d8f-43b273a991a4\") " Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.620109 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.620152 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.625085 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn" (OuterVolumeSpecName: "kube-api-access-rwvkn") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "kube-api-access-rwvkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.626590 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts" (OuterVolumeSpecName: "scripts") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.649970 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.698903 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.721400 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.721737 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.721866 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.721981 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwvkn\" (UniqueName: \"kubernetes.io/projected/c9b07f0e-014d-4799-9d8f-43b273a991a4-kube-api-access-rwvkn\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.722097 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.722219 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9b07f0e-014d-4799-9d8f-43b273a991a4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.738717 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data" (OuterVolumeSpecName: "config-data") pod "c9b07f0e-014d-4799-9d8f-43b273a991a4" (UID: "c9b07f0e-014d-4799-9d8f-43b273a991a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:53 crc kubenswrapper[4902]: I1202 14:37:53.825645 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b07f0e-014d-4799-9d8f-43b273a991a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.222475 4902 generic.go:334] "Generic (PLEG): container finished" podID="5f41756d-ba98-4fe9-9b76-7924f7879577" containerID="b68210ce3c87dd829683b4c920f83244c49db2ba9db82ede14962ef90911d1e1" exitCode=0 Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.222542 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2tflw" event={"ID":"5f41756d-ba98-4fe9-9b76-7924f7879577","Type":"ContainerDied","Data":"b68210ce3c87dd829683b4c920f83244c49db2ba9db82ede14962ef90911d1e1"} Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.224999 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9b07f0e-014d-4799-9d8f-43b273a991a4","Type":"ContainerDied","Data":"e7659abebb5595683818119c2e4dbc6e8c698db91172de3a3a6ac3bcfc246864"} Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.225072 4902 scope.go:117] "RemoveContainer" containerID="b28908441a8e10fee1781af50f642f90b1b9ce5702adfa1193439e63a0e59c33" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.225153 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.260656 4902 scope.go:117] "RemoveContainer" containerID="21463af6555fc69a8ed5a620e3b721eba38b3f9250e9dc876da0c149a256b156" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.296208 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.300960 4902 scope.go:117] "RemoveContainer" containerID="efe20cbb708961fe3f2f75ed91a5ba15173cd1992bbd46283d578f91ca09876c" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.308526 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.331341 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.331946 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-notification-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.331973 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-notification-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.331992 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-central-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332004 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-central-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.332020 4902 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332031 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.332048 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="proxy-httpd" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332058 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="proxy-httpd" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.332086 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="sg-core" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332099 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="sg-core" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.332116 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332127 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: E1202 14:37:54.332144 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon-log" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332157 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon-log" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332450 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-notification-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332488 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332510 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon-log" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332531 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="sg-core" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332545 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="ceilometer-central-agent" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.332583 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" containerName="proxy-httpd" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.333226 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eab04a1-6320-449e-9f31-600aa46a57b0" containerName="horizon" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.333438 4902 scope.go:117] "RemoveContainer" containerID="456029b7f9633e8a71195abb35b3fa32afee39251dcceaa095c311ee0d2ed089" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.335451 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.341248 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.345013 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.348931 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436679 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436721 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436794 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436859 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmkjd\" (UniqueName: \"kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436943 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.436971 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.539435 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.539862 4902 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.540206 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.540722 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.540924 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.540946 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmkjd\" (UniqueName: \"kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.541420 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.541872 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.542507 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.545475 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.545891 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.548448 4902 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.550211 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.563973 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmkjd\" (UniqueName: \"kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd\") pod \"ceilometer-0\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " pod="openstack/ceilometer-0" Dec 02 14:37:54 crc kubenswrapper[4902]: I1202 14:37:54.660842 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.126038 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9b07f0e-014d-4799-9d8f-43b273a991a4" path="/var/lib/kubelet/pods/c9b07f0e-014d-4799-9d8f-43b273a991a4/volumes" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.159267 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:37:55 crc kubenswrapper[4902]: W1202 14:37:55.164242 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda891d0bb_de55_4715_86a6_e0d1c893322d.slice/crio-ac869843a29ac862b9bf3ad0a82c3db84fcf32806a9b022010a3d15c00683898 WatchSource:0}: Error finding container ac869843a29ac862b9bf3ad0a82c3db84fcf32806a9b022010a3d15c00683898: Status 404 returned error can't find the container with id ac869843a29ac862b9bf3ad0a82c3db84fcf32806a9b022010a3d15c00683898 Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.239101 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerStarted","Data":"ac869843a29ac862b9bf3ad0a82c3db84fcf32806a9b022010a3d15c00683898"} Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.619257 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.763877 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data\") pod \"5f41756d-ba98-4fe9-9b76-7924f7879577\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.764506 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn2d4\" (UniqueName: \"kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4\") pod \"5f41756d-ba98-4fe9-9b76-7924f7879577\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.764772 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts\") pod \"5f41756d-ba98-4fe9-9b76-7924f7879577\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.764923 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle\") pod \"5f41756d-ba98-4fe9-9b76-7924f7879577\" (UID: \"5f41756d-ba98-4fe9-9b76-7924f7879577\") " Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.770848 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts" (OuterVolumeSpecName: "scripts") pod "5f41756d-ba98-4fe9-9b76-7924f7879577" (UID: "5f41756d-ba98-4fe9-9b76-7924f7879577"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.771139 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4" (OuterVolumeSpecName: "kube-api-access-bn2d4") pod "5f41756d-ba98-4fe9-9b76-7924f7879577" (UID: "5f41756d-ba98-4fe9-9b76-7924f7879577"). InnerVolumeSpecName "kube-api-access-bn2d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.799389 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data" (OuterVolumeSpecName: "config-data") pod "5f41756d-ba98-4fe9-9b76-7924f7879577" (UID: "5f41756d-ba98-4fe9-9b76-7924f7879577"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.802422 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f41756d-ba98-4fe9-9b76-7924f7879577" (UID: "5f41756d-ba98-4fe9-9b76-7924f7879577"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.867657 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.867694 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.867710 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f41756d-ba98-4fe9-9b76-7924f7879577-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:55 crc kubenswrapper[4902]: I1202 14:37:55.867723 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn2d4\" (UniqueName: \"kubernetes.io/projected/5f41756d-ba98-4fe9-9b76-7924f7879577-kube-api-access-bn2d4\") on node \"crc\" DevicePath \"\"" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.280869 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-2tflw" event={"ID":"5f41756d-ba98-4fe9-9b76-7924f7879577","Type":"ContainerDied","Data":"4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc"} Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.280930 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4218591e684d52c2e963d5dff94719008b8cafc5528609391f54d79e093bbbbc" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.281010 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-2tflw" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.395333 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 14:37:56 crc kubenswrapper[4902]: E1202 14:37:56.395798 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f41756d-ba98-4fe9-9b76-7924f7879577" containerName="nova-cell0-conductor-db-sync" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.395818 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f41756d-ba98-4fe9-9b76-7924f7879577" containerName="nova-cell0-conductor-db-sync" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.396032 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f41756d-ba98-4fe9-9b76-7924f7879577" containerName="nova-cell0-conductor-db-sync" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.396654 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.399791 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zfd96" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.402597 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.428376 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.589362 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s9r4\" (UniqueName: \"kubernetes.io/projected/c866917d-0d79-4d60-8d9b-066986964749-kube-api-access-5s9r4\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.589665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.589721 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.691961 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s9r4\" (UniqueName: \"kubernetes.io/projected/c866917d-0d79-4d60-8d9b-066986964749-kube-api-access-5s9r4\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.692075 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.692129 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.696336 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.697011 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866917d-0d79-4d60-8d9b-066986964749-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.723381 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s9r4\" (UniqueName: \"kubernetes.io/projected/c866917d-0d79-4d60-8d9b-066986964749-kube-api-access-5s9r4\") pod \"nova-cell0-conductor-0\" (UID: \"c866917d-0d79-4d60-8d9b-066986964749\") " pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:56 crc kubenswrapper[4902]: I1202 14:37:56.795099 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:57 crc kubenswrapper[4902]: I1202 14:37:57.250022 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 14:37:57 crc kubenswrapper[4902]: W1202 14:37:57.250594 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc866917d_0d79_4d60_8d9b_066986964749.slice/crio-eee4f19a3529ef9cb1cbb84994988da6831e42b3aca6aa7e7caa9aa17e201707 WatchSource:0}: Error finding container eee4f19a3529ef9cb1cbb84994988da6831e42b3aca6aa7e7caa9aa17e201707: Status 404 returned error can't find the container with id eee4f19a3529ef9cb1cbb84994988da6831e42b3aca6aa7e7caa9aa17e201707 Dec 02 14:37:57 crc kubenswrapper[4902]: I1202 14:37:57.291127 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c866917d-0d79-4d60-8d9b-066986964749","Type":"ContainerStarted","Data":"eee4f19a3529ef9cb1cbb84994988da6831e42b3aca6aa7e7caa9aa17e201707"} Dec 02 14:37:57 crc kubenswrapper[4902]: I1202 14:37:57.293294 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerStarted","Data":"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585"} Dec 02 14:37:58 crc kubenswrapper[4902]: I1202 14:37:58.312095 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerStarted","Data":"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44"} Dec 02 14:37:58 crc kubenswrapper[4902]: I1202 14:37:58.312551 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerStarted","Data":"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0"} Dec 02 14:37:58 crc kubenswrapper[4902]: I1202 14:37:58.314779 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c866917d-0d79-4d60-8d9b-066986964749","Type":"ContainerStarted","Data":"f144fe5a54c8f361f77ca78073dd91a7cda525ecd3efdd121dcbaeae08b42354"} Dec 02 14:37:58 crc kubenswrapper[4902]: I1202 14:37:58.314948 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 02 14:37:58 crc kubenswrapper[4902]: I1202 14:37:58.340190 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.340168832 podStartE2EDuration="2.340168832s" podCreationTimestamp="2025-12-02 14:37:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:37:58.328240784 +0000 UTC m=+1309.519549513" watchObservedRunningTime="2025-12-02 
14:37:58.340168832 +0000 UTC m=+1309.531477541" Dec 02 14:38:00 crc kubenswrapper[4902]: I1202 14:38:00.335265 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerStarted","Data":"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52"} Dec 02 14:38:00 crc kubenswrapper[4902]: I1202 14:38:00.335664 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:38:00 crc kubenswrapper[4902]: I1202 14:38:00.374799 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.146808912 podStartE2EDuration="6.374773786s" podCreationTimestamp="2025-12-02 14:37:54 +0000 UTC" firstStartedPulling="2025-12-02 14:37:55.170082449 +0000 UTC m=+1306.361391158" lastFinishedPulling="2025-12-02 14:37:59.398047323 +0000 UTC m=+1310.589356032" observedRunningTime="2025-12-02 14:38:00.370596227 +0000 UTC m=+1311.561904936" watchObservedRunningTime="2025-12-02 14:38:00.374773786 +0000 UTC m=+1311.566082495" Dec 02 14:38:06 crc kubenswrapper[4902]: I1202 14:38:06.851724 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.509980 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-jx5xc"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.511741 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.513958 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.514210 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.552843 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jx5xc"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.685747 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.687306 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.690399 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.702750 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.702799 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.702840 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.702901 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6pcs\" (UniqueName: \"kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.719742 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.776295 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.780175 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.790854 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808591 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6pcs\" (UniqueName: \"kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808654 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808699 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztz78\" (UniqueName: \"kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808753 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808785 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808821 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808850 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.808886 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.822349 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle\") pod 
\"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.824998 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.833295 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.879332 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.896179 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6pcs\" (UniqueName: \"kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs\") pod \"nova-cell0-cell-mapping-jx5xc\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911396 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911465 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911502 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztz78\" (UniqueName: \"kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911550 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911592 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911616 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.911656 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9w5z\" (UniqueName: \"kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.912615 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.938963 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.942911 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.948371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztz78\" (UniqueName: \"kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.952125 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data\") pod \"nova-api-0\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " pod="openstack/nova-api-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.954306 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.957151 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.991182 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.992543 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:07 crc kubenswrapper[4902]: I1202 14:38:07.994658 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.006009 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.015276 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9w5z\" (UniqueName: \"kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.015370 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.015473 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.019121 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.027333 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.037494 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9w5z\" (UniqueName: \"kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z\") pod \"nova-cell1-novncproxy-0\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.050937 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.062159 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.079435 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.082003 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.093465 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117390 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117430 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117658 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngkwz\" (UniqueName: \"kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117690 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117736 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117759 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.117786 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6kns\" (UniqueName: \"kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.175001 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.220466 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.220874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.220963 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlzrg\" (UniqueName: \"kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221051 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221115 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221227 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngkwz\" (UniqueName: \"kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221259 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221307 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221381 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc 
kubenswrapper[4902]: I1202 14:38:08.221409 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221513 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221594 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.221697 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6kns\" (UniqueName: \"kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.223698 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.231228 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.232126 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.239335 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.246605 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.249170 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6kns\" (UniqueName: \"kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns\") pod \"nova-metadata-0\" (UID: 
\"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.251065 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngkwz\" (UniqueName: \"kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz\") pod \"nova-scheduler-0\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.301798 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.323595 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.323740 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.323799 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.323918 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlzrg\" (UniqueName: \"kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.323977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.324014 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.325640 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.327153 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.328109 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.329070 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.333395 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.367457 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlzrg\" (UniqueName: \"kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg\") pod \"dnsmasq-dns-757b4f8459-zrdjh\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.376955 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.394085 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.402225 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.578627 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.660130 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ndfkx"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.661520 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.665064 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.665270 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.674921 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ndfkx"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.732148 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jx5xc"] Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.733484 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.733525 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p94pg\" (UniqueName: \"kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.733638 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.733871 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.835672 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.836027 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.836078 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: 
\"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.836096 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p94pg\" (UniqueName: \"kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.856947 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.856990 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.857082 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:08 crc kubenswrapper[4902]: I1202 14:38:08.872091 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p94pg\" (UniqueName: \"kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg\") pod \"nova-cell1-conductor-db-sync-ndfkx\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:08.999429 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:09 crc kubenswrapper[4902]: W1202 14:38:09.003788 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09ab87f8_b746_4da6_9174_02f3c052d845.slice/crio-5237331740d372b6770c24533aade30eb594183c55849603728e0798fbbaadec WatchSource:0}: Error finding container 5237331740d372b6770c24533aade30eb594183c55849603728e0798fbbaadec: Status 404 returned error can't find the container with id 5237331740d372b6770c24533aade30eb594183c55849603728e0798fbbaadec Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.004196 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.054298 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:09 crc kubenswrapper[4902]: W1202 14:38:09.074489 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bd2fab9_91aa_446a_ac26_a404f7b07991.slice/crio-119e9b5e25efb15cc64d7a61e03dc3538c6030182d5fd27d922f4d27e84ae7c3 WatchSource:0}: Error finding container 119e9b5e25efb15cc64d7a61e03dc3538c6030182d5fd27d922f4d27e84ae7c3: Status 404 returned error can't find the container with id 119e9b5e25efb15cc64d7a61e03dc3538c6030182d5fd27d922f4d27e84ae7c3 Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.159491 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.228537 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.447229 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"09ab87f8-b746-4da6-9174-02f3c052d845","Type":"ContainerStarted","Data":"5237331740d372b6770c24533aade30eb594183c55849603728e0798fbbaadec"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.455226 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerStarted","Data":"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.455403 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerStarted","Data":"119e9b5e25efb15cc64d7a61e03dc3538c6030182d5fd27d922f4d27e84ae7c3"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.460701 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jx5xc" event={"ID":"117c518f-dc8f-49d4-a389-a42e671ad97a","Type":"ContainerStarted","Data":"d7b951b59f2ae69c176ac3600b2428084bc60abc8ac64947ff54327ed0c426b0"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.460759 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jx5xc" event={"ID":"117c518f-dc8f-49d4-a389-a42e671ad97a","Type":"ContainerStarted","Data":"fc6d076452c5630e121111e6f26be74f4aea5b8afcb43683530eba4604e7e24d"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.465636 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1774ade2-b63d-4aca-9bcf-abbd17486f41","Type":"ContainerStarted","Data":"9e56786da89be85eb64bab0cc6958fd8baf81ec3675f3b9be5ea361bfdb81e78"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.466655 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerStarted","Data":"1c1d99433356d755a95fd0f1dd49b1c7c8b314ec556071645f35ae7db70d46f0"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.467743 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerStarted","Data":"c3c573089d93417d015c606015c340cf87368586e34e7b5c92a9b68306364514"} Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.509242 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-jx5xc" podStartSLOduration=2.509221522 podStartE2EDuration="2.509221522s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:09.504596041 +0000 UTC m=+1320.695904750" watchObservedRunningTime="2025-12-02 14:38:09.509221522 +0000 UTC m=+1320.700530231" Dec 02 14:38:09 crc kubenswrapper[4902]: I1202 14:38:09.615164 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ndfkx"] Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.478779 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" event={"ID":"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446","Type":"ContainerStarted","Data":"be48170d4b7c7fea223089a271804833260148431c52db31a0d5c5cff2a53a26"} Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.479067 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" event={"ID":"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446","Type":"ContainerStarted","Data":"665d22cdde7799c47dbbeda0ce7deb02be2a7f4ee7e964410878ec6e3c601ab2"} Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.484139 4902 generic.go:334] "Generic (PLEG): container finished" podID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerID="6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520" exitCode=0 Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.484898 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerDied","Data":"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520"} Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.484924 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerStarted","Data":"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532"} Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.485183 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.502028 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" podStartSLOduration=2.50199901 podStartE2EDuration="2.50199901s" podCreationTimestamp="2025-12-02 14:38:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:10.498731288 +0000 UTC m=+1321.690039997" watchObservedRunningTime="2025-12-02 14:38:10.50199901 +0000 UTC m=+1321.693307719" Dec 02 14:38:10 crc kubenswrapper[4902]: I1202 14:38:10.524496 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" podStartSLOduration=3.5244728370000002 podStartE2EDuration="3.524472837s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:10.514014881 +0000 UTC m=+1321.705323590" watchObservedRunningTime="2025-12-02 14:38:10.524472837 +0000 UTC m=+1321.715781546" Dec 02 14:38:11 crc kubenswrapper[4902]: I1202 14:38:11.247052 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:11 crc kubenswrapper[4902]: I1202 14:38:11.266270 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.533281 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerStarted","Data":"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"} Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.538634 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"09ab87f8-b746-4da6-9174-02f3c052d845","Type":"ContainerStarted","Data":"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c"} Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.538804 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="09ab87f8-b746-4da6-9174-02f3c052d845" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c" gracePeriod=30 Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.548549 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1774ade2-b63d-4aca-9bcf-abbd17486f41","Type":"ContainerStarted","Data":"b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04"} Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.562863 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerStarted","Data":"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff"} Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.570139 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.576997152 podStartE2EDuration="6.570115604s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="2025-12-02 14:38:09.032309485 +0000 UTC m=+1320.223618194" lastFinishedPulling="2025-12-02 14:38:13.025427937 +0000 UTC m=+1324.216736646" observedRunningTime="2025-12-02 14:38:13.56008849 +0000 UTC m=+1324.751397199" watchObservedRunningTime="2025-12-02 14:38:13.570115604 +0000 UTC m=+1324.761424323" Dec 02 14:38:13 crc kubenswrapper[4902]: I1202 14:38:13.585630 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.794816611 podStartE2EDuration="6.585611473s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="2025-12-02 14:38:09.225290031 +0000 UTC m=+1320.416598740" lastFinishedPulling="2025-12-02 14:38:13.016084893 +0000 UTC m=+1324.207393602" observedRunningTime="2025-12-02 14:38:13.57633156 +0000 UTC m=+1324.767640269" watchObservedRunningTime="2025-12-02 14:38:13.585611473 +0000 UTC m=+1324.776920182" Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.577368 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerStarted","Data":"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a"} Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.577416 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-log" containerID="cri-o://2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" gracePeriod=30 Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.577603 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-metadata" containerID="cri-o://24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" gracePeriod=30 Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.586837 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerStarted","Data":"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"} Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.618539 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.847982199 podStartE2EDuration="7.618514347s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="2025-12-02 14:38:09.250714711 +0000 UTC m=+1320.442023420" lastFinishedPulling="2025-12-02 14:38:13.021246859 +0000 UTC m=+1324.212555568" observedRunningTime="2025-12-02 14:38:14.604924452 +0000 UTC m=+1325.796233171" watchObservedRunningTime="2025-12-02 14:38:14.618514347 +0000 UTC m=+1325.809823066" Dec 02 14:38:14 crc kubenswrapper[4902]: I1202 14:38:14.642634 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.317843814 podStartE2EDuration="7.642608359s" podCreationTimestamp="2025-12-02 14:38:07 +0000 UTC" firstStartedPulling="2025-12-02 14:38:08.692023638 +0000 UTC m=+1319.883332347" lastFinishedPulling="2025-12-02 14:38:13.016788163 +0000 UTC m=+1324.208096892" observedRunningTime="2025-12-02 14:38:14.631776322 +0000 UTC m=+1325.823085041" watchObservedRunningTime="2025-12-02 14:38:14.642608359 +0000 UTC m=+1325.833917088" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.220942 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.304407 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6kns\" (UniqueName: \"kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns\") pod \"70a989f9-1b57-45c9-bdda-004f7f8b1714\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.304488 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle\") pod \"70a989f9-1b57-45c9-bdda-004f7f8b1714\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.304623 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data\") pod \"70a989f9-1b57-45c9-bdda-004f7f8b1714\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.304706 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs\") pod \"70a989f9-1b57-45c9-bdda-004f7f8b1714\" (UID: \"70a989f9-1b57-45c9-bdda-004f7f8b1714\") " Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.305520 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs" (OuterVolumeSpecName: "logs") pod "70a989f9-1b57-45c9-bdda-004f7f8b1714" (UID: "70a989f9-1b57-45c9-bdda-004f7f8b1714"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.313471 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns" (OuterVolumeSpecName: "kube-api-access-z6kns") pod "70a989f9-1b57-45c9-bdda-004f7f8b1714" (UID: "70a989f9-1b57-45c9-bdda-004f7f8b1714"). InnerVolumeSpecName "kube-api-access-z6kns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.345034 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data" (OuterVolumeSpecName: "config-data") pod "70a989f9-1b57-45c9-bdda-004f7f8b1714" (UID: "70a989f9-1b57-45c9-bdda-004f7f8b1714"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.352392 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70a989f9-1b57-45c9-bdda-004f7f8b1714" (UID: "70a989f9-1b57-45c9-bdda-004f7f8b1714"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.408640 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70a989f9-1b57-45c9-bdda-004f7f8b1714-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.408737 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6kns\" (UniqueName: \"kubernetes.io/projected/70a989f9-1b57-45c9-bdda-004f7f8b1714-kube-api-access-z6kns\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.408805 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.408836 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a989f9-1b57-45c9-bdda-004f7f8b1714-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.598001 4902 generic.go:334] "Generic (PLEG): container finished" podID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerID="24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" exitCode=0 Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.598041 4902 generic.go:334] "Generic (PLEG): container finished" podID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerID="2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" exitCode=143 Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.599394 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.599703 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerDied","Data":"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a"} Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.599771 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerDied","Data":"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff"} Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.599787 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"70a989f9-1b57-45c9-bdda-004f7f8b1714","Type":"ContainerDied","Data":"1c1d99433356d755a95fd0f1dd49b1c7c8b314ec556071645f35ae7db70d46f0"} Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.599809 4902 scope.go:117] "RemoveContainer" containerID="24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.622428 4902 scope.go:117] "RemoveContainer" containerID="2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.658588 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.666098 4902 scope.go:117] "RemoveContainer" containerID="24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" Dec 02 14:38:15 crc kubenswrapper[4902]: E1202 14:38:15.666866 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a\": container with ID starting with 24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a not found: ID does not exist" containerID="24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.666914 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a"} err="failed to get container status \"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a\": rpc error: code = NotFound desc = could not find container \"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a\": container with ID starting with 24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a not found: ID does not exist" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.666943 4902 scope.go:117] "RemoveContainer" containerID="2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" Dec 02 14:38:15 crc kubenswrapper[4902]: E1202 14:38:15.667551 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff\": container with ID starting with 2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff not found: ID does not exist" containerID="2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.667630 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff"} err="failed to get container status \"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff\": rpc error: code = NotFound desc = could not find container \"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff\": container with ID starting with 2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff not found: ID does not exist" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.667668 4902 scope.go:117] "RemoveContainer" containerID="24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.668108 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a"} err="failed to get container status \"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a\": rpc error: code = NotFound desc = could not find container \"24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a\": container with ID starting with 24cbf9e968372f827a1117299e564e331ab9ecb3f5de432960aae479ed2e663a not found: ID does not exist" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.668139 4902 scope.go:117] "RemoveContainer" containerID="2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.668518 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff"} err="failed to get container status \"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff\": rpc error: code = NotFound desc = could not find container \"2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff\": 
container with ID starting with 2f0ee4ce93cf1564e2c16c6110cc167cf4de5c38137fb6831e82a1bbb61788ff not found: ID does not exist" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.673229 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.695646 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:15 crc kubenswrapper[4902]: E1202 14:38:15.696193 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-metadata" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.696217 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-metadata" Dec 02 14:38:15 crc kubenswrapper[4902]: E1202 14:38:15.696253 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-log" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.696259 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-log" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.696458 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-metadata" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.696482 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" containerName="nova-metadata-log" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.697651 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.700428 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.700813 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.720939 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.817401 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.817744 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.817877 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.818231 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlvlm\" (UniqueName: \"kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.818309 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.920427 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.920474 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.920545 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlvlm\" (UniqueName: \"kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.920585 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.920630 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.921242 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.944154 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.944183 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " 
pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.944296 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:15 crc kubenswrapper[4902]: I1202 14:38:15.949319 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlvlm\" (UniqueName: \"kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm\") pod \"nova-metadata-0\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " pod="openstack/nova-metadata-0" Dec 02 14:38:16 crc kubenswrapper[4902]: I1202 14:38:16.035550 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:16 crc kubenswrapper[4902]: I1202 14:38:16.573277 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:16 crc kubenswrapper[4902]: I1202 14:38:16.617455 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerStarted","Data":"c14de608c8c336ba6919156c453171d0368e20b276ac475d6e7fdf5c825aa4e9"} Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.123110 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70a989f9-1b57-45c9-bdda-004f7f8b1714" path="/var/lib/kubelet/pods/70a989f9-1b57-45c9-bdda-004f7f8b1714/volumes" Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.628295 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerStarted","Data":"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81"} Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.628336 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerStarted","Data":"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070"} Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.635162 4902 generic.go:334] "Generic (PLEG): container finished" podID="117c518f-dc8f-49d4-a389-a42e671ad97a" containerID="d7b951b59f2ae69c176ac3600b2428084bc60abc8ac64947ff54327ed0c426b0" exitCode=0 Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.635210 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jx5xc" event={"ID":"117c518f-dc8f-49d4-a389-a42e671ad97a","Type":"ContainerDied","Data":"d7b951b59f2ae69c176ac3600b2428084bc60abc8ac64947ff54327ed0c426b0"} Dec 02 14:38:17 crc kubenswrapper[4902]: I1202 14:38:17.651375 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.651354793 podStartE2EDuration="2.651354793s" podCreationTimestamp="2025-12-02 14:38:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:17.648989376 +0000 UTC m=+1328.840298085" watchObservedRunningTime="2025-12-02 14:38:17.651354793 +0000 UTC m=+1328.842663502" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.006217 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:38:18 crc 
kubenswrapper[4902]: I1202 14:38:18.006498 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.302924 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.395547 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.396109 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.404788 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.463501 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.509095 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.509431 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="dnsmasq-dns" containerID="cri-o://80f367b146682ff16c836c98a05279148d5d7a5dc75a96004b78b56a7d46cbba" gracePeriod=10 Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.650265 4902 generic.go:334] "Generic (PLEG): container finished" podID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerID="80f367b146682ff16c836c98a05279148d5d7a5dc75a96004b78b56a7d46cbba" exitCode=0 Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.650336 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" event={"ID":"bddd6267-1a8a-4783-a674-4037dc5c52da","Type":"ContainerDied","Data":"80f367b146682ff16c836c98a05279148d5d7a5dc75a96004b78b56a7d46cbba"} Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.651776 4902 generic.go:334] "Generic (PLEG): container finished" podID="d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" containerID="be48170d4b7c7fea223089a271804833260148431c52db31a0d5c5cff2a53a26" exitCode=0 Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.653262 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" event={"ID":"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446","Type":"ContainerDied","Data":"be48170d4b7c7fea223089a271804833260148431c52db31a0d5c5cff2a53a26"} Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.705396 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 14:38:18 crc kubenswrapper[4902]: I1202 14:38:18.910162 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.181:5353: connect: connection refused" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.089855 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:19 crc 
kubenswrapper[4902]: I1202 14:38:19.090526 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.204709 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.210939 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.289820 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6pcs\" (UniqueName: \"kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs\") pod \"117c518f-dc8f-49d4-a389-a42e671ad97a\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.289879 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts\") pod \"117c518f-dc8f-49d4-a389-a42e671ad97a\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.289907 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.289930 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290032 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle\") pod \"117c518f-dc8f-49d4-a389-a42e671ad97a\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290090 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290146 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290165 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data\") pod \"117c518f-dc8f-49d4-a389-a42e671ad97a\" (UID: \"117c518f-dc8f-49d4-a389-a42e671ad97a\") " Dec 02 
14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290204 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.290228 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcsrw\" (UniqueName: \"kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw\") pod \"bddd6267-1a8a-4783-a674-4037dc5c52da\" (UID: \"bddd6267-1a8a-4783-a674-4037dc5c52da\") " Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.300280 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts" (OuterVolumeSpecName: "scripts") pod "117c518f-dc8f-49d4-a389-a42e671ad97a" (UID: "117c518f-dc8f-49d4-a389-a42e671ad97a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.302395 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs" (OuterVolumeSpecName: "kube-api-access-j6pcs") pod "117c518f-dc8f-49d4-a389-a42e671ad97a" (UID: "117c518f-dc8f-49d4-a389-a42e671ad97a"). InnerVolumeSpecName "kube-api-access-j6pcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.330327 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw" (OuterVolumeSpecName: "kube-api-access-zcsrw") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "kube-api-access-zcsrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.349695 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "117c518f-dc8f-49d4-a389-a42e671ad97a" (UID: "117c518f-dc8f-49d4-a389-a42e671ad97a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.370238 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.379759 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data" (OuterVolumeSpecName: "config-data") pod "117c518f-dc8f-49d4-a389-a42e671ad97a" (UID: "117c518f-dc8f-49d4-a389-a42e671ad97a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392005 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392052 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcsrw\" (UniqueName: \"kubernetes.io/projected/bddd6267-1a8a-4783-a674-4037dc5c52da-kube-api-access-zcsrw\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392069 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6pcs\" (UniqueName: \"kubernetes.io/projected/117c518f-dc8f-49d4-a389-a42e671ad97a-kube-api-access-j6pcs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392080 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392091 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392105 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/117c518f-dc8f-49d4-a389-a42e671ad97a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.392608 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.393427 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.393757 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.396673 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config" (OuterVolumeSpecName: "config") pod "bddd6267-1a8a-4783-a674-4037dc5c52da" (UID: "bddd6267-1a8a-4783-a674-4037dc5c52da"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.493787 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.493830 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.493841 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.493849 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bddd6267-1a8a-4783-a674-4037dc5c52da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.664038 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jx5xc" event={"ID":"117c518f-dc8f-49d4-a389-a42e671ad97a","Type":"ContainerDied","Data":"fc6d076452c5630e121111e6f26be74f4aea5b8afcb43683530eba4604e7e24d"} Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.664099 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc6d076452c5630e121111e6f26be74f4aea5b8afcb43683530eba4604e7e24d" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.664180 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jx5xc" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.674864 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" event={"ID":"bddd6267-1a8a-4783-a674-4037dc5c52da","Type":"ContainerDied","Data":"b7d5dd633050bcd2bfbac0acb36094d4b7b7de00ad465c153c34adcdd4a2e011"} Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.675113 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-vtdqn" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.675258 4902 scope.go:117] "RemoveContainer" containerID="80f367b146682ff16c836c98a05279148d5d7a5dc75a96004b78b56a7d46cbba" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.744373 4902 scope.go:117] "RemoveContainer" containerID="4515363a23d7504efb117817b199725fa08dd4d30ceaf6dea537d46045254e29" Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.751112 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.764904 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-vtdqn"] Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.871666 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.871940 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-log" containerID="cri-o://4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f" gracePeriod=30 Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.872364 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-api" containerID="cri-o://52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2" gracePeriod=30 Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.892349 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.952427 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.952674 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-log" containerID="cri-o://15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" gracePeriod=30 Dec 02 14:38:19 crc kubenswrapper[4902]: I1202 14:38:19.953706 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-metadata" containerID="cri-o://3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" gracePeriod=30 Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.132514 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.206969 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p94pg\" (UniqueName: \"kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg\") pod \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.207031 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data\") pod \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.207099 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle\") pod \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.207216 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts\") pod \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\" (UID: \"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.212650 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg" (OuterVolumeSpecName: "kube-api-access-p94pg") pod "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" (UID: "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446"). InnerVolumeSpecName "kube-api-access-p94pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.214215 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts" (OuterVolumeSpecName: "scripts") pod "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" (UID: "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.238942 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data" (OuterVolumeSpecName: "config-data") pod "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" (UID: "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.240934 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" (UID: "d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.311890 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p94pg\" (UniqueName: \"kubernetes.io/projected/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-kube-api-access-p94pg\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.311936 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.311948 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.311956 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.470404 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.616457 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs\") pod \"5a363025-47e0-4959-89c1-23184523a6ad\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.616554 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data\") pod \"5a363025-47e0-4959-89c1-23184523a6ad\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.616667 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlvlm\" (UniqueName: \"kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm\") pod \"5a363025-47e0-4959-89c1-23184523a6ad\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.616735 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs\") pod \"5a363025-47e0-4959-89c1-23184523a6ad\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.616760 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle\") pod \"5a363025-47e0-4959-89c1-23184523a6ad\" (UID: \"5a363025-47e0-4959-89c1-23184523a6ad\") " Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.617714 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs" (OuterVolumeSpecName: "logs") pod "5a363025-47e0-4959-89c1-23184523a6ad" (UID: "5a363025-47e0-4959-89c1-23184523a6ad"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.621788 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm" (OuterVolumeSpecName: "kube-api-access-wlvlm") pod "5a363025-47e0-4959-89c1-23184523a6ad" (UID: "5a363025-47e0-4959-89c1-23184523a6ad"). InnerVolumeSpecName "kube-api-access-wlvlm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.670708 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a363025-47e0-4959-89c1-23184523a6ad" (UID: "5a363025-47e0-4959-89c1-23184523a6ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.670720 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data" (OuterVolumeSpecName: "config-data") pod "5a363025-47e0-4959-89c1-23184523a6ad" (UID: "5a363025-47e0-4959-89c1-23184523a6ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.718781 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.719042 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlvlm\" (UniqueName: \"kubernetes.io/projected/5a363025-47e0-4959-89c1-23184523a6ad-kube-api-access-wlvlm\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.719131 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a363025-47e0-4959-89c1-23184523a6ad-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.719207 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.726779 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5a363025-47e0-4959-89c1-23184523a6ad" (UID: "5a363025-47e0-4959-89c1-23184523a6ad"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729667 4902 generic.go:334] "Generic (PLEG): container finished" podID="5a363025-47e0-4959-89c1-23184523a6ad" containerID="3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" exitCode=0 Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729714 4902 generic.go:334] "Generic (PLEG): container finished" podID="5a363025-47e0-4959-89c1-23184523a6ad" containerID="15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" exitCode=143 Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729752 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729824 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerDied","Data":"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81"} Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729860 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerDied","Data":"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070"} Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729874 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5a363025-47e0-4959-89c1-23184523a6ad","Type":"ContainerDied","Data":"c14de608c8c336ba6919156c453171d0368e20b276ac475d6e7fdf5c825aa4e9"} Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.729910 4902 scope.go:117] "RemoveContainer" containerID="3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.762321 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" event={"ID":"d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446","Type":"ContainerDied","Data":"665d22cdde7799c47dbbeda0ce7deb02be2a7f4ee7e964410878ec6e3c601ab2"} Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.762361 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="665d22cdde7799c47dbbeda0ce7deb02be2a7f4ee7e964410878ec6e3c601ab2" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.762434 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ndfkx" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.780825 4902 generic.go:334] "Generic (PLEG): container finished" podID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerID="4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f" exitCode=143 Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.781052 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerName="nova-scheduler-scheduler" containerID="cri-o://b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" gracePeriod=30 Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.781401 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerDied","Data":"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"} Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.787861 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788407 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117c518f-dc8f-49d4-a389-a42e671ad97a" containerName="nova-manage" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788431 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="117c518f-dc8f-49d4-a389-a42e671ad97a" containerName="nova-manage" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788458 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" containerName="nova-cell1-conductor-db-sync" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788467 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" containerName="nova-cell1-conductor-db-sync" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788486 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="dnsmasq-dns" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788495 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="dnsmasq-dns" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788512 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="init" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788519 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="init" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788527 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-metadata" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788534 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-metadata" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.788551 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-log" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788578 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-log" Dec 02 14:38:20 crc 
kubenswrapper[4902]: I1202 14:38:20.788819 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" containerName="dnsmasq-dns" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788836 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" containerName="nova-cell1-conductor-db-sync" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788855 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="117c518f-dc8f-49d4-a389-a42e671ad97a" containerName="nova-manage" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788878 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-log" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.788896 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a363025-47e0-4959-89c1-23184523a6ad" containerName="nova-metadata-metadata" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.789761 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.798348 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.808056 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.820880 4902 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a363025-47e0-4959-89c1-23184523a6ad-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.879295 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.888621 4902 scope.go:117] "RemoveContainer" containerID="15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.895123 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.904288 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.906214 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.908466 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.908849 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.926514 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.926756 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mfrw\" (UniqueName: \"kubernetes.io/projected/9eccfb03-21f3-4259-85bc-625274a3b3a8-kube-api-access-4mfrw\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.926949 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.933711 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.948716 4902 scope.go:117] "RemoveContainer" containerID="3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.949266 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81\": container with ID starting with 3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81 not found: ID does not exist" containerID="3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.949309 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81"} err="failed to get container status \"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81\": rpc error: code = NotFound desc = could not find container \"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81\": container with ID starting with 3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81 not found: ID does not exist" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.949334 4902 scope.go:117] "RemoveContainer" containerID="15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" Dec 02 14:38:20 crc kubenswrapper[4902]: E1202 14:38:20.949832 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070\": container with ID starting with 15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070 not found: ID does not exist" 
containerID="15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.949865 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070"} err="failed to get container status \"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070\": rpc error: code = NotFound desc = could not find container \"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070\": container with ID starting with 15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070 not found: ID does not exist" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.949895 4902 scope.go:117] "RemoveContainer" containerID="3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.950243 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81"} err="failed to get container status \"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81\": rpc error: code = NotFound desc = could not find container \"3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81\": container with ID starting with 3031ca880bc8e55aa7e2b525ade391eaf729300d4952dfb3fa903bb3a98a7e81 not found: ID does not exist" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.950288 4902 scope.go:117] "RemoveContainer" containerID="15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070" Dec 02 14:38:20 crc kubenswrapper[4902]: I1202 14:38:20.950625 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070"} err="failed to get container status \"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070\": rpc error: code = NotFound desc = could not find container \"15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070\": container with ID starting with 15268e5335b8d43cece3b7f9d4a4dca76f5bc3e541211524504d93291b734070 not found: ID does not exist" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028335 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028401 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028505 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mfrw\" (UniqueName: 
\"kubernetes.io/projected/9eccfb03-21f3-4259-85bc-625274a3b3a8-kube-api-access-4mfrw\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028692 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7flj\" (UniqueName: \"kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028759 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.028810 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.032275 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.043408 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eccfb03-21f3-4259-85bc-625274a3b3a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.048222 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mfrw\" (UniqueName: \"kubernetes.io/projected/9eccfb03-21f3-4259-85bc-625274a3b3a8-kube-api-access-4mfrw\") pod \"nova-cell1-conductor-0\" (UID: \"9eccfb03-21f3-4259-85bc-625274a3b3a8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.117251 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a363025-47e0-4959-89c1-23184523a6ad" path="/var/lib/kubelet/pods/5a363025-47e0-4959-89c1-23184523a6ad/volumes" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.118155 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bddd6267-1a8a-4783-a674-4037dc5c52da" path="/var/lib/kubelet/pods/bddd6267-1a8a-4783-a674-4037dc5c52da/volumes" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.130755 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.130801 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7flj\" (UniqueName: \"kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.130845 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.130874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.130908 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.131442 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.134217 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.134388 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.135033 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.147535 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7flj\" (UniqueName: \"kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj\") pod \"nova-metadata-0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.168658 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.246537 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.643350 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.770932 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.796249 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9eccfb03-21f3-4259-85bc-625274a3b3a8","Type":"ContainerStarted","Data":"13649cb5b7a96a3235501cbb779301b20f23f3ec76558dfa3776063d27321353"} Dec 02 14:38:21 crc kubenswrapper[4902]: I1202 14:38:21.799453 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerStarted","Data":"8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48"} Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.816525 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerStarted","Data":"dd1ec77aaea611dc396284d97b0692ab6fe8606cf28968b971ee5bcd17f614d3"} Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.817146 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerStarted","Data":"b3d77c1e37faf51c6022b6cd28af4bff0972afe51ef8f1525fb52bff71f17c2a"} Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.821972 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9eccfb03-21f3-4259-85bc-625274a3b3a8","Type":"ContainerStarted","Data":"37b70b5984d5b572357096004f482e0da22754bfb512eeca55388aae1588a253"} Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.823921 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.857099 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8570733280000002 podStartE2EDuration="2.857073328s" podCreationTimestamp="2025-12-02 14:38:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:22.844927655 +0000 UTC m=+1334.036236374" watchObservedRunningTime="2025-12-02 14:38:22.857073328 +0000 UTC m=+1334.048382057" Dec 02 14:38:22 crc kubenswrapper[4902]: I1202 14:38:22.886873 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.886852042 podStartE2EDuration="2.886852042s" podCreationTimestamp="2025-12-02 14:38:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:22.875893111 +0000 UTC m=+1334.067201840" watchObservedRunningTime="2025-12-02 14:38:22.886852042 +0000 UTC m=+1334.078160761" Dec 02 14:38:23 crc kubenswrapper[4902]: E1202 14:38:23.397770 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot 
register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 14:38:23 crc kubenswrapper[4902]: E1202 14:38:23.399219 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 14:38:23 crc kubenswrapper[4902]: E1202 14:38:23.400717 4902 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 14:38:23 crc kubenswrapper[4902]: E1202 14:38:23.400747 4902 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerName="nova-scheduler-scheduler" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.669631 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.797663 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.840419 4902 generic.go:334] "Generic (PLEG): container finished" podID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerID="52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2" exitCode=0 Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.840519 4902 util.go:48] "No ready sandbox for pod can be found. 
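Need to start a new one" pod="openstack/nova-api-0"

The ExecSync failures above are the nova-scheduler readiness probe racing its own termination: the probe is an exec action (/usr/bin/pgrep -r DRST nova-scheduler, per the cmd field), and CRI-O refuses to register a new exec PID in a container that is already stopping, so the prober records errors until the container exits. The corresponding probe definition would be along these lines (a sketch; the timing fields are assumptions, only the command comes from the log):

    package main

    import corev1 "k8s.io/api/core/v1"

    // schedulerReadiness sketches the exec readiness probe whose failures
    // appear in the log. The command is taken verbatim from the cmd field;
    // the period and threshold are illustrative, not confirmed values.
    func schedulerReadiness() *corev1.Probe {
        return &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                Exec: &corev1.ExecAction{
                    Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
                },
            },
            PeriodSeconds:    10, // assumption
            FailureThreshold: 3,  // assumption
        }
    }

    func main() { _ = schedulerReadiness() }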
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.840529 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerDied","Data":"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"}
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.840637 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c38ee1b-f029-4d5d-91cc-fd29714708b3","Type":"ContainerDied","Data":"c3c573089d93417d015c606015c340cf87368586e34e7b5c92a9b68306364514"}
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.840686 4902 scope.go:117] "RemoveContainer" containerID="52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.863793 4902 scope.go:117] "RemoveContainer" containerID="4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.883732 4902 scope.go:117] "RemoveContainer" containerID="52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"
Dec 02 14:38:24 crc kubenswrapper[4902]: E1202 14:38:24.884324 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2\": container with ID starting with 52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2 not found: ID does not exist" containerID="52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.884357 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2"} err="failed to get container status \"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2\": rpc error: code = NotFound desc = could not find container \"52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2\": container with ID starting with 52f9bbd4b4254f522f172ea151c743cc578e7d990d678d2a2da8f0349ec532f2 not found: ID does not exist"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.884378 4902 scope.go:117] "RemoveContainer" containerID="4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"
Dec 02 14:38:24 crc kubenswrapper[4902]: E1202 14:38:24.884764 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f\": container with ID starting with 4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f not found: ID does not exist" containerID="4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.884809 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f"} err="failed to get container status \"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f\": rpc error: code = NotFound desc = could not find container \"4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f\": container with ID starting with 4ce98fcf1e22cc85002df3dc171642501e15808aa77e3466810ca69ff375282f not found: ID does not exist"
Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.922390 4902
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs\") pod \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.922650 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data\") pod \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.922730 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztz78\" (UniqueName: \"kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78\") pod \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.922774 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle\") pod \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\" (UID: \"9c38ee1b-f029-4d5d-91cc-fd29714708b3\") " Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.923079 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs" (OuterVolumeSpecName: "logs") pod "9c38ee1b-f029-4d5d-91cc-fd29714708b3" (UID: "9c38ee1b-f029-4d5d-91cc-fd29714708b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.924415 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c38ee1b-f029-4d5d-91cc-fd29714708b3-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.941468 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78" (OuterVolumeSpecName: "kube-api-access-ztz78") pod "9c38ee1b-f029-4d5d-91cc-fd29714708b3" (UID: "9c38ee1b-f029-4d5d-91cc-fd29714708b3"). InnerVolumeSpecName "kube-api-access-ztz78". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.957886 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data" (OuterVolumeSpecName: "config-data") pod "9c38ee1b-f029-4d5d-91cc-fd29714708b3" (UID: "9c38ee1b-f029-4d5d-91cc-fd29714708b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:24 crc kubenswrapper[4902]: I1202 14:38:24.966717 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c38ee1b-f029-4d5d-91cc-fd29714708b3" (UID: "9c38ee1b-f029-4d5d-91cc-fd29714708b3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.026523 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.026992 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztz78\" (UniqueName: \"kubernetes.io/projected/9c38ee1b-f029-4d5d-91cc-fd29714708b3-kube-api-access-ztz78\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.027072 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c38ee1b-f029-4d5d-91cc-fd29714708b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.174396 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.208850 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.256921 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:25 crc kubenswrapper[4902]: E1202 14:38:25.257825 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-api" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.257845 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-api" Dec 02 14:38:25 crc kubenswrapper[4902]: E1202 14:38:25.257902 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-log" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.257909 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-log" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.267990 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-log" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.268026 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" containerName="nova-api-api" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.272366 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.282124 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.295444 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.344628 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvbf8\" (UniqueName: \"kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.344682 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.344775 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.344839 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.447092 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvbf8\" (UniqueName: \"kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.447144 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.447208 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.447234 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.447642 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " 
pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.450933 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.451413 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.464711 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvbf8\" (UniqueName: \"kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8\") pod \"nova-api-0\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.612164 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.854328 4902 generic.go:334] "Generic (PLEG): container finished" podID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerID="b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" exitCode=0 Dec 02 14:38:25 crc kubenswrapper[4902]: I1202 14:38:25.854548 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1774ade2-b63d-4aca-9bcf-abbd17486f41","Type":"ContainerDied","Data":"b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04"} Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.050795 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.055598 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.158045 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngkwz\" (UniqueName: \"kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz\") pod \"1774ade2-b63d-4aca-9bcf-abbd17486f41\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.158330 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data\") pod \"1774ade2-b63d-4aca-9bcf-abbd17486f41\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.158362 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle\") pod \"1774ade2-b63d-4aca-9bcf-abbd17486f41\" (UID: \"1774ade2-b63d-4aca-9bcf-abbd17486f41\") " Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.166580 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz" (OuterVolumeSpecName: "kube-api-access-ngkwz") pod "1774ade2-b63d-4aca-9bcf-abbd17486f41" (UID: "1774ade2-b63d-4aca-9bcf-abbd17486f41"). 
InnerVolumeSpecName "kube-api-access-ngkwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.205693 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data" (OuterVolumeSpecName: "config-data") pod "1774ade2-b63d-4aca-9bcf-abbd17486f41" (UID: "1774ade2-b63d-4aca-9bcf-abbd17486f41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.211192 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.211936 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1774ade2-b63d-4aca-9bcf-abbd17486f41" (UID: "1774ade2-b63d-4aca-9bcf-abbd17486f41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.247706 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.248789 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.262337 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngkwz\" (UniqueName: \"kubernetes.io/projected/1774ade2-b63d-4aca-9bcf-abbd17486f41-kube-api-access-ngkwz\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.262371 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.262381 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774ade2-b63d-4aca-9bcf-abbd17486f41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.869747 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerStarted","Data":"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442"} Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.869800 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerStarted","Data":"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d"} Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.869813 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerStarted","Data":"bdfdefb2b607b80012b1431d569da80c81e89a77ba8833329b47a64d51f2e820"} Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.872326 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1774ade2-b63d-4aca-9bcf-abbd17486f41","Type":"ContainerDied","Data":"9e56786da89be85eb64bab0cc6958fd8baf81ec3675f3b9be5ea361bfdb81e78"} Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.872351 4902 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.872395 4902 scope.go:117] "RemoveContainer" containerID="b34b37e8599748f97dfb81bc78707e8ea59ebac3896c794eeb3b2cfc0f190a04" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.888115 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.8880967960000001 podStartE2EDuration="1.888096796s" podCreationTimestamp="2025-12-02 14:38:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:26.886038557 +0000 UTC m=+1338.077347266" watchObservedRunningTime="2025-12-02 14:38:26.888096796 +0000 UTC m=+1338.079405505" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.914917 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.923496 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.936074 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:26 crc kubenswrapper[4902]: E1202 14:38:26.936464 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerName="nova-scheduler-scheduler" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.936484 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerName="nova-scheduler-scheduler" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.936699 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" containerName="nova-scheduler-scheduler" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.937402 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.939634 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 14:38:26 crc kubenswrapper[4902]: I1202 14:38:26.944685 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.076793 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8dwh\" (UniqueName: \"kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.076972 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.077177 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.118631 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1774ade2-b63d-4aca-9bcf-abbd17486f41" path="/var/lib/kubelet/pods/1774ade2-b63d-4aca-9bcf-abbd17486f41/volumes" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.119226 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c38ee1b-f029-4d5d-91cc-fd29714708b3" path="/var/lib/kubelet/pods/9c38ee1b-f029-4d5d-91cc-fd29714708b3/volumes" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.181853 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8dwh\" (UniqueName: \"kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.181958 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.182039 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.189507 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: 
I1202 14:38:27.189711 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.207123 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8dwh\" (UniqueName: \"kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh\") pod \"nova-scheduler-0\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.258009 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.754276 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:38:27 crc kubenswrapper[4902]: I1202 14:38:27.890921 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3cf756fd-5346-49b5-a556-f6d0b71941c9","Type":"ContainerStarted","Data":"8a40149cd27214d72f07ca5d62d9da91eadbbb9ba2ec2f8d23dc7906eafc9301"} Dec 02 14:38:28 crc kubenswrapper[4902]: I1202 14:38:28.902356 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3cf756fd-5346-49b5-a556-f6d0b71941c9","Type":"ContainerStarted","Data":"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2"} Dec 02 14:38:28 crc kubenswrapper[4902]: I1202 14:38:28.931766 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.931739655 podStartE2EDuration="2.931739655s" podCreationTimestamp="2025-12-02 14:38:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:28.927945648 +0000 UTC m=+1340.119254357" watchObservedRunningTime="2025-12-02 14:38:28.931739655 +0000 UTC m=+1340.123048384" Dec 02 14:38:28 crc kubenswrapper[4902]: I1202 14:38:28.988087 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:28 crc kubenswrapper[4902]: I1202 14:38:28.988340 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" containerName="kube-state-metrics" containerID="cri-o://923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43" gracePeriod=30 Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.538640 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.628002 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7wdf\" (UniqueName: \"kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf\") pod \"9dec59dd-e5ef-40bd-bdac-ba601e08ef96\" (UID: \"9dec59dd-e5ef-40bd-bdac-ba601e08ef96\") " Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.652254 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf" (OuterVolumeSpecName: "kube-api-access-n7wdf") pod "9dec59dd-e5ef-40bd-bdac-ba601e08ef96" (UID: "9dec59dd-e5ef-40bd-bdac-ba601e08ef96"). InnerVolumeSpecName "kube-api-access-n7wdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.730758 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7wdf\" (UniqueName: \"kubernetes.io/projected/9dec59dd-e5ef-40bd-bdac-ba601e08ef96-kube-api-access-n7wdf\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.913768 4902 generic.go:334] "Generic (PLEG): container finished" podID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" containerID="923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43" exitCode=2 Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.913813 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9dec59dd-e5ef-40bd-bdac-ba601e08ef96","Type":"ContainerDied","Data":"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43"} Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.913881 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9dec59dd-e5ef-40bd-bdac-ba601e08ef96","Type":"ContainerDied","Data":"544d6114e7db746e72d02dab86eb2db365091ba29e23db521cfabef5c9141f5d"} Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.913897 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.913918 4902 scope.go:117] "RemoveContainer" containerID="923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.941991 4902 scope.go:117] "RemoveContainer" containerID="923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43" Dec 02 14:38:29 crc kubenswrapper[4902]: E1202 14:38:29.942539 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43\": container with ID starting with 923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43 not found: ID does not exist" containerID="923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.942587 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43"} err="failed to get container status \"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43\": rpc error: code = NotFound desc = could not find container \"923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43\": container with ID starting with 923f094dbbc6a10f1dac70d7863f9222d9d7538f6540ff54122ef1ba81b8bd43 not found: ID does not exist" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.950731 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.968584 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.984753 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:29 crc kubenswrapper[4902]: E1202 14:38:29.985314 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" containerName="kube-state-metrics" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.985337 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" containerName="kube-state-metrics" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.985662 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" containerName="kube-state-metrics" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.986604 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.991415 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.994494 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 02 14:38:29 crc kubenswrapper[4902]: I1202 14:38:29.997013 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.138374 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.138527 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.138711 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.138754 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s82z7\" (UniqueName: \"kubernetes.io/projected/8588d8af-b946-4d96-bd28-472301c12a7b-kube-api-access-s82z7\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.240948 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.242316 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.242388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.242407 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s82z7\" 
(UniqueName: \"kubernetes.io/projected/8588d8af-b946-4d96-bd28-472301c12a7b-kube-api-access-s82z7\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.252817 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.252857 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.258694 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8588d8af-b946-4d96-bd28-472301c12a7b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.259321 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s82z7\" (UniqueName: \"kubernetes.io/projected/8588d8af-b946-4d96-bd28-472301c12a7b-kube-api-access-s82z7\") pod \"kube-state-metrics-0\" (UID: \"8588d8af-b946-4d96-bd28-472301c12a7b\") " pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.310064 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.808572 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.924940 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8588d8af-b946-4d96-bd28-472301c12a7b","Type":"ContainerStarted","Data":"525486f0c12e9f3622dc1aa8a2019d20470b969d94f960f1d04dc3a84471552c"} Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.981687 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.982040 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-central-agent" containerID="cri-o://9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585" gracePeriod=30 Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.982129 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="sg-core" containerID="cri-o://70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44" gracePeriod=30 Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.982115 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="proxy-httpd" containerID="cri-o://8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52" gracePeriod=30 Dec 02 14:38:30 crc kubenswrapper[4902]: I1202 14:38:30.983042 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-notification-agent" containerID="cri-o://ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0" gracePeriod=30 Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.148932 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dec59dd-e5ef-40bd-bdac-ba601e08ef96" path="/var/lib/kubelet/pods/9dec59dd-e5ef-40bd-bdac-ba601e08ef96/volumes" Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.252227 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.252265 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940220 4902 generic.go:334] "Generic (PLEG): container finished" podID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerID="8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52" exitCode=0 Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940480 4902 generic.go:334] "Generic (PLEG): container finished" podID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerID="70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44" exitCode=2 Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940492 4902 generic.go:334] "Generic (PLEG): container finished" podID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerID="9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585" exitCode=0 Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940276 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerDied","Data":"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52"} Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940540 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerDied","Data":"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44"} Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.940617 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerDied","Data":"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585"} Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.942757 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8588d8af-b946-4d96-bd28-472301c12a7b","Type":"ContainerStarted","Data":"1f0edee0cab22dcbb4bf0951a870c7b215283dbda8eee2c691d30f6250c72294"} Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.942962 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 14:38:31 crc kubenswrapper[4902]: I1202 14:38:31.973339 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.554794716 podStartE2EDuration="2.97331888s" podCreationTimestamp="2025-12-02 14:38:29 +0000 UTC" firstStartedPulling="2025-12-02 14:38:30.81912614 +0000 UTC m=+1342.010434859" lastFinishedPulling="2025-12-02 14:38:31.237650314 +0000 UTC m=+1342.428959023" observedRunningTime="2025-12-02 14:38:31.965856338 +0000 UTC m=+1343.157165057" watchObservedRunningTime="2025-12-02 14:38:31.97331888 +0000 UTC m=+1343.164627589" Dec 02 14:38:32 crc kubenswrapper[4902]: I1202 14:38:32.259007 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 14:38:32 crc kubenswrapper[4902]: I1202 14:38:32.269726 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:32 crc kubenswrapper[4902]: I1202 14:38:32.269766 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:33 crc kubenswrapper[4902]: E1202 14:38:33.343726 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda891d0bb_de55_4715_86a6_e0d1c893322d.slice/crio-ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.582349 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715008 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmkjd\" (UniqueName: \"kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715336 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715470 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715514 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715534 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715664 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.715722 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd\") pod \"a891d0bb-de55-4715-86a6-e0d1c893322d\" (UID: \"a891d0bb-de55-4715-86a6-e0d1c893322d\") " Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.716181 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.716334 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.720384 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts" (OuterVolumeSpecName: "scripts") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.720915 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd" (OuterVolumeSpecName: "kube-api-access-bmkjd") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "kube-api-access-bmkjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.747360 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.814203 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817382 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmkjd\" (UniqueName: \"kubernetes.io/projected/a891d0bb-de55-4715-86a6-e0d1c893322d-kube-api-access-bmkjd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817406 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817414 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817423 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817434 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.817441 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a891d0bb-de55-4715-86a6-e0d1c893322d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.849372 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data" (OuterVolumeSpecName: "config-data") pod "a891d0bb-de55-4715-86a6-e0d1c893322d" (UID: "a891d0bb-de55-4715-86a6-e0d1c893322d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.918946 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a891d0bb-de55-4715-86a6-e0d1c893322d-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.980410 4902 generic.go:334] "Generic (PLEG): container finished" podID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerID="ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0" exitCode=0 Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.980615 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerDied","Data":"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0"} Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.980812 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a891d0bb-de55-4715-86a6-e0d1c893322d","Type":"ContainerDied","Data":"ac869843a29ac862b9bf3ad0a82c3db84fcf32806a9b022010a3d15c00683898"} Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.980917 4902 scope.go:117] "RemoveContainer" containerID="8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52" Dec 02 14:38:33 crc kubenswrapper[4902]: I1202 14:38:33.980715 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.006418 4902 scope.go:117] "RemoveContainer" containerID="70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.064718 4902 scope.go:117] "RemoveContainer" containerID="ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.071248 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.077478 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.088871 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.089383 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="sg-core" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089407 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="sg-core" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.089438 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-central-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089447 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-central-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.089470 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" 
containerName="ceilometer-notification-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089478 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-notification-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.089493 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="proxy-httpd" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089502 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="proxy-httpd" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089774 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="proxy-httpd" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089805 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-central-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089823 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="ceilometer-notification-agent" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.089842 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" containerName="sg-core" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.093031 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.094252 4902 scope.go:117] "RemoveContainer" containerID="9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.100862 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.101010 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.101963 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.105064 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.139841 4902 scope.go:117] "RemoveContainer" containerID="8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.140239 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52\": container with ID starting with 8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52 not found: ID does not exist" containerID="8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.140272 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52"} err="failed to get container status \"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52\": rpc error: code = NotFound desc = could not find container 
\"8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52\": container with ID starting with 8e676c6bfedd3616a8b8f3619eaa527ad22ad988b2bebaac77bf1cc1dd31fc52 not found: ID does not exist" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.140294 4902 scope.go:117] "RemoveContainer" containerID="70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.140632 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44\": container with ID starting with 70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44 not found: ID does not exist" containerID="70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.140654 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44"} err="failed to get container status \"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44\": rpc error: code = NotFound desc = could not find container \"70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44\": container with ID starting with 70933a1f9a137013c86791f9b5fc12260ffff205fbd6ac0884fa93b639792d44 not found: ID does not exist" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.140670 4902 scope.go:117] "RemoveContainer" containerID="ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.140998 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0\": container with ID starting with ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0 not found: ID does not exist" containerID="ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.141027 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0"} err="failed to get container status \"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0\": rpc error: code = NotFound desc = could not find container \"ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0\": container with ID starting with ca71e50552dfd4c295675dc22e477d5f21fd27b02123cb96ad22306542af25c0 not found: ID does not exist" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.141042 4902 scope.go:117] "RemoveContainer" containerID="9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585" Dec 02 14:38:34 crc kubenswrapper[4902]: E1202 14:38:34.141367 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585\": container with ID starting with 9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585 not found: ID does not exist" containerID="9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.141459 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585"} 
err="failed to get container status \"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585\": rpc error: code = NotFound desc = could not find container \"9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585\": container with ID starting with 9131e98de1c79a3dc58bf3e115f061f9ee66c8ccc8574f3c5020d8a5a6431585 not found: ID does not exist" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.230258 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.230583 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.230760 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.230897 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.230999 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.231180 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.231382 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.231498 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmzqq\" (UniqueName: \"kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.332967 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.333354 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.333458 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.333637 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.333755 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.333889 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmzqq\" (UniqueName: \"kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.334043 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.334438 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.334528 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.334631 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.341383 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.341504 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.341634 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.342480 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.343309 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.357592 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmzqq\" (UniqueName: \"kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq\") pod \"ceilometer-0\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.416659 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.932935 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:34 crc kubenswrapper[4902]: W1202 14:38:34.934040 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf454a0f_ab13_4256_b19a_69d30b6c818b.slice/crio-5aab265def0caf87ecd0f0454d08d2c181c520232680a97809a11a1759bbb458 WatchSource:0}: Error finding container 5aab265def0caf87ecd0f0454d08d2c181c520232680a97809a11a1759bbb458: Status 404 returned error can't find the container with id 5aab265def0caf87ecd0f0454d08d2c181c520232680a97809a11a1759bbb458 Dec 02 14:38:34 crc kubenswrapper[4902]: I1202 14:38:34.994280 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerStarted","Data":"5aab265def0caf87ecd0f0454d08d2c181c520232680a97809a11a1759bbb458"} Dec 02 14:38:35 crc kubenswrapper[4902]: I1202 14:38:35.117102 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a891d0bb-de55-4715-86a6-e0d1c893322d" path="/var/lib/kubelet/pods/a891d0bb-de55-4715-86a6-e0d1c893322d/volumes" Dec 02 14:38:35 crc kubenswrapper[4902]: I1202 14:38:35.612454 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:38:35 crc kubenswrapper[4902]: I1202 14:38:35.612600 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:38:36 crc kubenswrapper[4902]: I1202 14:38:36.005719 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerStarted","Data":"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da"} Dec 02 14:38:36 crc kubenswrapper[4902]: I1202 14:38:36.654219 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:36 crc kubenswrapper[4902]: I1202 14:38:36.654257 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:38:37 crc kubenswrapper[4902]: I1202 14:38:37.258507 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 14:38:37 crc kubenswrapper[4902]: I1202 14:38:37.287235 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 14:38:38 crc kubenswrapper[4902]: I1202 14:38:38.031228 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerStarted","Data":"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd"} Dec 02 14:38:38 crc kubenswrapper[4902]: I1202 14:38:38.059304 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 14:38:39 crc kubenswrapper[4902]: I1202 14:38:39.042080 4902 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerStarted","Data":"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611"} Dec 02 14:38:40 crc kubenswrapper[4902]: I1202 14:38:40.096933 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerStarted","Data":"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1"} Dec 02 14:38:40 crc kubenswrapper[4902]: I1202 14:38:40.097700 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:38:40 crc kubenswrapper[4902]: I1202 14:38:40.122402 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.433226862 podStartE2EDuration="6.122383177s" podCreationTimestamp="2025-12-02 14:38:34 +0000 UTC" firstStartedPulling="2025-12-02 14:38:34.955267275 +0000 UTC m=+1346.146575984" lastFinishedPulling="2025-12-02 14:38:39.64442359 +0000 UTC m=+1350.835732299" observedRunningTime="2025-12-02 14:38:40.119593038 +0000 UTC m=+1351.310901757" watchObservedRunningTime="2025-12-02 14:38:40.122383177 +0000 UTC m=+1351.313691886" Dec 02 14:38:40 crc kubenswrapper[4902]: I1202 14:38:40.328173 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 14:38:41 crc kubenswrapper[4902]: I1202 14:38:41.255323 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 14:38:41 crc kubenswrapper[4902]: I1202 14:38:41.258039 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 14:38:41 crc kubenswrapper[4902]: I1202 14:38:41.267475 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 14:38:42 crc kubenswrapper[4902]: I1202 14:38:42.142406 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 14:38:43 crc kubenswrapper[4902]: E1202 14:38:43.622823 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09ab87f8_b746_4da6_9174_02f3c052d845.slice/crio-80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.086457 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.159132 4902 generic.go:334] "Generic (PLEG): container finished" podID="09ab87f8-b746-4da6-9174-02f3c052d845" containerID="80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c" exitCode=137 Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.161094 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle\") pod \"09ab87f8-b746-4da6-9174-02f3c052d845\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.161172 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9w5z\" (UniqueName: \"kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z\") pod \"09ab87f8-b746-4da6-9174-02f3c052d845\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.161198 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data\") pod \"09ab87f8-b746-4da6-9174-02f3c052d845\" (UID: \"09ab87f8-b746-4da6-9174-02f3c052d845\") " Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.162320 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.162643 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"09ab87f8-b746-4da6-9174-02f3c052d845","Type":"ContainerDied","Data":"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c"} Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.162856 4902 scope.go:117] "RemoveContainer" containerID="80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.162740 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"09ab87f8-b746-4da6-9174-02f3c052d845","Type":"ContainerDied","Data":"5237331740d372b6770c24533aade30eb594183c55849603728e0798fbbaadec"} Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.172776 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z" (OuterVolumeSpecName: "kube-api-access-m9w5z") pod "09ab87f8-b746-4da6-9174-02f3c052d845" (UID: "09ab87f8-b746-4da6-9174-02f3c052d845"). InnerVolumeSpecName "kube-api-access-m9w5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.200818 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data" (OuterVolumeSpecName: "config-data") pod "09ab87f8-b746-4da6-9174-02f3c052d845" (UID: "09ab87f8-b746-4da6-9174-02f3c052d845"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.226187 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09ab87f8-b746-4da6-9174-02f3c052d845" (UID: "09ab87f8-b746-4da6-9174-02f3c052d845"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.263539 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.263617 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9w5z\" (UniqueName: \"kubernetes.io/projected/09ab87f8-b746-4da6-9174-02f3c052d845-kube-api-access-m9w5z\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.263630 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ab87f8-b746-4da6-9174-02f3c052d845-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.304437 4902 scope.go:117] "RemoveContainer" containerID="80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c" Dec 02 14:38:44 crc kubenswrapper[4902]: E1202 14:38:44.305246 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c\": container with ID starting with 80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c not found: ID does not exist" containerID="80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.305340 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c"} err="failed to get container status \"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c\": rpc error: code = NotFound desc = could not find container \"80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c\": container with ID starting with 80b3d2426a8e52f400c78ee9f9e41e683f1cd86fdcb5f0577a1a9909bdac199c not found: ID does not exist" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.519676 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.537863 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.562830 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:44 crc kubenswrapper[4902]: E1202 14:38:44.564084 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09ab87f8-b746-4da6-9174-02f3c052d845" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.564105 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="09ab87f8-b746-4da6-9174-02f3c052d845" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.564401 4902 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="09ab87f8-b746-4da6-9174-02f3c052d845" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.565639 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.569804 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.570015 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.570139 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.582104 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.670326 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmf6d\" (UniqueName: \"kubernetes.io/projected/abcdfeb8-008e-4d99-8860-179b489f7783-kube-api-access-qmf6d\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.670396 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.670842 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.671023 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.671322 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.774227 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.774355 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.774429 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.774510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.774674 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmf6d\" (UniqueName: \"kubernetes.io/projected/abcdfeb8-008e-4d99-8860-179b489f7783-kube-api-access-qmf6d\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.781208 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.781358 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.781484 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.784703 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdfeb8-008e-4d99-8860-179b489f7783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.802506 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmf6d\" (UniqueName: \"kubernetes.io/projected/abcdfeb8-008e-4d99-8860-179b489f7783-kube-api-access-qmf6d\") pod \"nova-cell1-novncproxy-0\" (UID: \"abcdfeb8-008e-4d99-8860-179b489f7783\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:44 crc kubenswrapper[4902]: I1202 14:38:44.886961 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.124451 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ab87f8-b746-4da6-9174-02f3c052d845" path="/var/lib/kubelet/pods/09ab87f8-b746-4da6-9174-02f3c052d845/volumes" Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.454507 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 14:38:45 crc kubenswrapper[4902]: W1202 14:38:45.460872 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabcdfeb8_008e_4d99_8860_179b489f7783.slice/crio-a3426dafdec49eb53b0ef35c6153839eb12527d9c34f268c4e87d5733f014460 WatchSource:0}: Error finding container a3426dafdec49eb53b0ef35c6153839eb12527d9c34f268c4e87d5733f014460: Status 404 returned error can't find the container with id a3426dafdec49eb53b0ef35c6153839eb12527d9c34f268c4e87d5733f014460 Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.617488 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.618931 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.624064 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 14:38:45 crc kubenswrapper[4902]: I1202 14:38:45.631111 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.185691 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"abcdfeb8-008e-4d99-8860-179b489f7783","Type":"ContainerStarted","Data":"c32cce8547c721e7eba482bd031ffb9a1f7e742298eb5d1a8bd937977633447b"} Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.186044 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"abcdfeb8-008e-4d99-8860-179b489f7783","Type":"ContainerStarted","Data":"a3426dafdec49eb53b0ef35c6153839eb12527d9c34f268c4e87d5733f014460"} Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.186336 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.197614 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.214943 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.214912471 podStartE2EDuration="2.214912471s" podCreationTimestamp="2025-12-02 14:38:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:46.202373306 +0000 UTC m=+1357.393682015" watchObservedRunningTime="2025-12-02 14:38:46.214912471 +0000 UTC m=+1357.406221180" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.402071 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.404325 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.416433 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.512708 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.513172 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.513349 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.513378 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.513429 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6txd\" (UniqueName: \"kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.513472 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.614997 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.615044 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.615104 4902 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-p6txd\" (UniqueName: \"kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.615172 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.615783 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.615843 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.616190 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.617100 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.617167 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.618299 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.621652 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.644300 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6txd\" (UniqueName: 
\"kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd\") pod \"dnsmasq-dns-89c5cd4d5-6nlh8\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:46 crc kubenswrapper[4902]: I1202 14:38:46.728305 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:47 crc kubenswrapper[4902]: I1202 14:38:47.283700 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.208786 4902 generic.go:334] "Generic (PLEG): container finished" podID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerID="057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70" exitCode=0 Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.208894 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" event={"ID":"fb9632b1-922e-4ee3-a4e0-a3134cd34db4","Type":"ContainerDied","Data":"057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70"} Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.209235 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" event={"ID":"fb9632b1-922e-4ee3-a4e0-a3134cd34db4","Type":"ContainerStarted","Data":"81bc3c293e1e84d751702d82b11771e730275086f59e1176bd6378052cf08621"} Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.596887 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.597486 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-central-agent" containerID="cri-o://f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da" gracePeriod=30 Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.597645 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="proxy-httpd" containerID="cri-o://6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1" gracePeriod=30 Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.597681 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="sg-core" containerID="cri-o://4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611" gracePeriod=30 Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.597715 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-notification-agent" containerID="cri-o://7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd" gracePeriod=30 Dec 02 14:38:48 crc kubenswrapper[4902]: I1202 14:38:48.856087 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224252 4902 generic.go:334] "Generic (PLEG): container finished" podID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerID="6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1" exitCode=0 Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224525 4902 generic.go:334] "Generic (PLEG): container finished" 
podID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerID="4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611" exitCode=2 Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224542 4902 generic.go:334] "Generic (PLEG): container finished" podID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerID="7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd" exitCode=0 Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224348 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerDied","Data":"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1"} Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224628 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerDied","Data":"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611"} Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.224641 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerDied","Data":"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd"} Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.230892 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-log" containerID="cri-o://d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d" gracePeriod=30 Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.230987 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" event={"ID":"fb9632b1-922e-4ee3-a4e0-a3134cd34db4","Type":"ContainerStarted","Data":"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24"} Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.231006 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-api" containerID="cri-o://53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442" gracePeriod=30 Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.263640 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" podStartSLOduration=3.263618397 podStartE2EDuration="3.263618397s" podCreationTimestamp="2025-12-02 14:38:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:49.253127439 +0000 UTC m=+1360.444436158" watchObservedRunningTime="2025-12-02 14:38:49.263618397 +0000 UTC m=+1360.454927106" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.643788 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.793703 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794018 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794189 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794217 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794272 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmzqq\" (UniqueName: \"kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794311 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794341 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794365 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.794978 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd\") pod \"af454a0f-ab13-4256-b19a-69d30b6c818b\" (UID: \"af454a0f-ab13-4256-b19a-69d30b6c818b\") " Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.795332 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.795677 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.795701 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af454a0f-ab13-4256-b19a-69d30b6c818b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.801365 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq" (OuterVolumeSpecName: "kube-api-access-wmzqq") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "kube-api-access-wmzqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.801382 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts" (OuterVolumeSpecName: "scripts") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.849013 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.887402 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.896940 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.896964 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.896973 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmzqq\" (UniqueName: \"kubernetes.io/projected/af454a0f-ab13-4256-b19a-69d30b6c818b-kube-api-access-wmzqq\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.901787 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.917592 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.921627 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data" (OuterVolumeSpecName: "config-data") pod "af454a0f-ab13-4256-b19a-69d30b6c818b" (UID: "af454a0f-ab13-4256-b19a-69d30b6c818b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.998703 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.998737 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:49 crc kubenswrapper[4902]: I1202 14:38:49.998750 4902 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af454a0f-ab13-4256-b19a-69d30b6c818b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.241814 4902 generic.go:334] "Generic (PLEG): container finished" podID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerID="f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da" exitCode=0 Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.241875 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerDied","Data":"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da"} Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.241901 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af454a0f-ab13-4256-b19a-69d30b6c818b","Type":"ContainerDied","Data":"5aab265def0caf87ecd0f0454d08d2c181c520232680a97809a11a1759bbb458"} Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.241899 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.241928 4902 scope.go:117] "RemoveContainer" containerID="6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.246082 4902 generic.go:334] "Generic (PLEG): container finished" podID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerID="d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d" exitCode=143 Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.246988 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerDied","Data":"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d"} Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.247023 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.333879 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.335450 4902 scope.go:117] "RemoveContainer" containerID="4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.345702 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.360360 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.360922 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-notification-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.360940 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-notification-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.360965 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="proxy-httpd" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.360972 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="proxy-httpd" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.360980 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="sg-core" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.360986 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="sg-core" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.361007 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-central-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.361012 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-central-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.361197 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="sg-core" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.361213 4902 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="proxy-httpd" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.361279 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-notification-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.361292 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" containerName="ceilometer-central-agent" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.363899 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.368022 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.368192 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.368469 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.375540 4902 scope.go:117] "RemoveContainer" containerID="7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.380937 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.408883 4902 scope.go:117] "RemoveContainer" containerID="f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.438463 4902 scope.go:117] "RemoveContainer" containerID="6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.439022 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1\": container with ID starting with 6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1 not found: ID does not exist" containerID="6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439053 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1"} err="failed to get container status \"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1\": rpc error: code = NotFound desc = could not find container \"6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1\": container with ID starting with 6a87787cfccfe22ace5bb57c0ac26c2944e9db55501a7a7741c70a16c63d8ec1 not found: ID does not exist" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439073 4902 scope.go:117] "RemoveContainer" containerID="4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.439491 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611\": container with ID starting with 4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611 not found: ID does not exist" 
containerID="4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439514 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611"} err="failed to get container status \"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611\": rpc error: code = NotFound desc = could not find container \"4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611\": container with ID starting with 4da916dcb3132099de0bb8139fdf877715589589bd673bcd1a8937823f576611 not found: ID does not exist" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439528 4902 scope.go:117] "RemoveContainer" containerID="7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.439770 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd\": container with ID starting with 7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd not found: ID does not exist" containerID="7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439788 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd"} err="failed to get container status \"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd\": rpc error: code = NotFound desc = could not find container \"7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd\": container with ID starting with 7f658cee83ff281db26e273f7b290de759f59c761d8a9f2cc599d292ce2e29fd not found: ID does not exist" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.439822 4902 scope.go:117] "RemoveContainer" containerID="f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da" Dec 02 14:38:50 crc kubenswrapper[4902]: E1202 14:38:50.440174 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da\": container with ID starting with f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da not found: ID does not exist" containerID="f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.440267 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da"} err="failed to get container status \"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da\": rpc error: code = NotFound desc = could not find container \"f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da\": container with ID starting with f7092e940615489ae9a1d42b018916f16216b31b96d664a719d04cbdf8f7c4da not found: ID does not exist" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509013 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: 
I1202 14:38:50.509147 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509195 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvhdk\" (UniqueName: \"kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509260 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509337 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509398 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509498 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.509601 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.610855 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611217 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611267 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd\") pod 
\"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611296 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611366 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611388 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvhdk\" (UniqueName: \"kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611417 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.611449 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.612306 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.612938 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.615686 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.616144 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.616275 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.618675 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.619360 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.632166 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvhdk\" (UniqueName: \"kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk\") pod \"ceilometer-0\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " pod="openstack/ceilometer-0" Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.683772 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:50 crc kubenswrapper[4902]: I1202 14:38:50.684732 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:51 crc kubenswrapper[4902]: I1202 14:38:51.122887 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af454a0f-ab13-4256-b19a-69d30b6c818b" path="/var/lib/kubelet/pods/af454a0f-ab13-4256-b19a-69d30b6c818b/volumes" Dec 02 14:38:51 crc kubenswrapper[4902]: I1202 14:38:51.194713 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:38:51 crc kubenswrapper[4902]: I1202 14:38:51.260075 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerStarted","Data":"66b0157bf918c1909e00436725ecdec1b4ecfcd55628fe399ee0889b4cc0d911"} Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.829828 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.964678 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs\") pod \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.964836 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle\") pod \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.964987 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvbf8\" (UniqueName: \"kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8\") pod \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.965021 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data\") pod \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\" (UID: \"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa\") " Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.965161 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs" (OuterVolumeSpecName: "logs") pod "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" (UID: "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.965474 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.969836 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8" (OuterVolumeSpecName: "kube-api-access-lvbf8") pod "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" (UID: "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa"). InnerVolumeSpecName "kube-api-access-lvbf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:52 crc kubenswrapper[4902]: I1202 14:38:52.993242 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" (UID: "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.007799 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data" (OuterVolumeSpecName: "config-data") pod "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" (UID: "feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.069179 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.069213 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvbf8\" (UniqueName: \"kubernetes.io/projected/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-kube-api-access-lvbf8\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.069226 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.294195 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerStarted","Data":"f017e04825e9a2a51478e9f3a566e47c27b5e63cb6fe1ad305a863a2d73d6407"} Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.296800 4902 generic.go:334] "Generic (PLEG): container finished" podID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerID="53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442" exitCode=0 Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.296937 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerDied","Data":"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442"} Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.297006 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa","Type":"ContainerDied","Data":"bdfdefb2b607b80012b1431d569da80c81e89a77ba8833329b47a64d51f2e820"} Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.297079 4902 scope.go:117] "RemoveContainer" containerID="53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.297280 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.329826 4902 scope.go:117] "RemoveContainer" containerID="d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.356606 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.366906 4902 scope.go:117] "RemoveContainer" containerID="53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442" Dec 02 14:38:53 crc kubenswrapper[4902]: E1202 14:38:53.371845 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442\": container with ID starting with 53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442 not found: ID does not exist" containerID="53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.371905 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442"} err="failed to get container status \"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442\": rpc error: code = NotFound desc = could not find container \"53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442\": container with ID starting with 53707252471c868acc9e212e63b98ca4708099d1edafc2d82ee4c4aa01d3e442 not found: ID does not exist" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.371933 4902 scope.go:117] "RemoveContainer" containerID="d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d" Dec 02 14:38:53 crc kubenswrapper[4902]: E1202 14:38:53.373856 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d\": container with ID starting with d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d not found: ID does not exist" containerID="d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.373916 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d"} err="failed to get container status \"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d\": rpc error: code = NotFound desc = could not find container \"d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d\": container with ID starting with d0e650923cfcda33a6c620b948152401f1c163f80d5ca55b9bf1d6fa477ca31d not found: ID does not exist" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.373972 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.409595 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:53 crc kubenswrapper[4902]: E1202 14:38:53.410069 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-api" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.410083 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-api" Dec 02 14:38:53 crc 
kubenswrapper[4902]: E1202 14:38:53.410097 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-log" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.410102 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-log" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.410276 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-api" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.410287 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" containerName="nova-api-log" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.411878 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.414802 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.415898 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.416336 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.418935 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476157 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476272 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476330 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476347 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476373 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xwk6\" (UniqueName: \"kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.476393 4902 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578091 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578514 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578591 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xwk6\" (UniqueName: \"kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578623 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578742 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.578923 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.579314 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.582825 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.583879 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.584339 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.588270 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.602203 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xwk6\" (UniqueName: \"kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6\") pod \"nova-api-0\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " pod="openstack/nova-api-0" Dec 02 14:38:53 crc kubenswrapper[4902]: I1202 14:38:53.731011 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:38:54 crc kubenswrapper[4902]: I1202 14:38:54.272964 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:38:54 crc kubenswrapper[4902]: I1202 14:38:54.311176 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerStarted","Data":"23c4d8144da270445dcc568fde5b6755196e19d5191d05916195a2d665c81dab"} Dec 02 14:38:54 crc kubenswrapper[4902]: I1202 14:38:54.314211 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerStarted","Data":"2384ad9d23840b6f62b6300dbcefe409b153be4eb2c4f954b23477ebbee37b60"} Dec 02 14:38:54 crc kubenswrapper[4902]: I1202 14:38:54.887511 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:54 crc kubenswrapper[4902]: I1202 14:38:54.910806 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.124124 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa" path="/var/lib/kubelet/pods/feb37bb4-d2c3-4d3a-af35-5a184ef8b6aa/volumes" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.325364 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerStarted","Data":"4674402171b0796884a5c4c3dfe09579a12a035e82635e6d5d2ddd633efdc3d2"} Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.325789 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerStarted","Data":"4224b053cc492d3995ba4080851967da18d49cf95e9d61abe8073d81ae918241"} Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.328267 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerStarted","Data":"c301ed4680efa3ceee32ede892bd9e371f53a6f46f9be85b52013280081a69cf"} Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.345001 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.344986113 podStartE2EDuration="2.344986113s" 
podCreationTimestamp="2025-12-02 14:38:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:55.344080527 +0000 UTC m=+1366.535389296" watchObservedRunningTime="2025-12-02 14:38:55.344986113 +0000 UTC m=+1366.536294832" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.354645 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.553361 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-r24tk"] Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.558498 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.561601 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.561985 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-r24tk"] Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.562119 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.729978 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.730025 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d22zj\" (UniqueName: \"kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.730061 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.730173 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.831992 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d22zj\" (UniqueName: \"kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.832393 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.832551 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.832641 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.836737 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.840012 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.852704 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:55 crc kubenswrapper[4902]: I1202 14:38:55.853421 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d22zj\" (UniqueName: \"kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj\") pod \"nova-cell1-cell-mapping-r24tk\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.048702 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.346466 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-central-agent" containerID="cri-o://f017e04825e9a2a51478e9f3a566e47c27b5e63cb6fe1ad305a863a2d73d6407" gracePeriod=30 Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.347023 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerStarted","Data":"40d99fdf9f42dac5dd74788b390050f4395eb7e1c44b7113b2fc87213b3a06fa"} Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.347053 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="sg-core" containerID="cri-o://c301ed4680efa3ceee32ede892bd9e371f53a6f46f9be85b52013280081a69cf" gracePeriod=30 Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.347073 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.347082 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="proxy-httpd" containerID="cri-o://40d99fdf9f42dac5dd74788b390050f4395eb7e1c44b7113b2fc87213b3a06fa" gracePeriod=30 Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.347125 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-notification-agent" containerID="cri-o://2384ad9d23840b6f62b6300dbcefe409b153be4eb2c4f954b23477ebbee37b60" gracePeriod=30 Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.372054 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.048288183 podStartE2EDuration="6.372036191s" podCreationTimestamp="2025-12-02 14:38:50 +0000 UTC" firstStartedPulling="2025-12-02 14:38:51.200981066 +0000 UTC m=+1362.392289775" lastFinishedPulling="2025-12-02 14:38:55.524729054 +0000 UTC m=+1366.716037783" observedRunningTime="2025-12-02 14:38:56.364869138 +0000 UTC m=+1367.556177847" watchObservedRunningTime="2025-12-02 14:38:56.372036191 +0000 UTC m=+1367.563344900" Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.577834 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-r24tk"] Dec 02 14:38:56 crc kubenswrapper[4902]: W1202 14:38:56.585921 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0cbd9373_5a7f_4bfa_90e9_b3e2ad7e763a.slice/crio-95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141 WatchSource:0}: Error finding container 95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141: Status 404 returned error can't find the container with id 95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141 Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.729909 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.803755 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:56 crc kubenswrapper[4902]: I1202 14:38:56.803998 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="dnsmasq-dns" containerID="cri-o://262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532" gracePeriod=10 Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.170932 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268135 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlzrg\" (UniqueName: \"kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268448 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268509 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268546 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268624 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.268686 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb\") pod \"0bd2fab9-91aa-446a-ac26-a404f7b07991\" (UID: \"0bd2fab9-91aa-446a-ac26-a404f7b07991\") " Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.290718 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg" (OuterVolumeSpecName: "kube-api-access-nlzrg") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). InnerVolumeSpecName "kube-api-access-nlzrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.326094 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.330237 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.334515 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.336092 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config" (OuterVolumeSpecName: "config") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.337149 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0bd2fab9-91aa-446a-ac26-a404f7b07991" (UID: "0bd2fab9-91aa-446a-ac26-a404f7b07991"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355416 4902 generic.go:334] "Generic (PLEG): container finished" podID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerID="40d99fdf9f42dac5dd74788b390050f4395eb7e1c44b7113b2fc87213b3a06fa" exitCode=0 Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355450 4902 generic.go:334] "Generic (PLEG): container finished" podID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerID="c301ed4680efa3ceee32ede892bd9e371f53a6f46f9be85b52013280081a69cf" exitCode=2 Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355456 4902 generic.go:334] "Generic (PLEG): container finished" podID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerID="2384ad9d23840b6f62b6300dbcefe409b153be4eb2c4f954b23477ebbee37b60" exitCode=0 Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355488 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerDied","Data":"40d99fdf9f42dac5dd74788b390050f4395eb7e1c44b7113b2fc87213b3a06fa"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355514 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerDied","Data":"c301ed4680efa3ceee32ede892bd9e371f53a6f46f9be85b52013280081a69cf"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.355523 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerDied","Data":"2384ad9d23840b6f62b6300dbcefe409b153be4eb2c4f954b23477ebbee37b60"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.356967 
4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r24tk" event={"ID":"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a","Type":"ContainerStarted","Data":"1aadb9450c881314ebb72943ddf6f16dd075cdc525183bbeac66aacae169c39b"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.357028 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r24tk" event={"ID":"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a","Type":"ContainerStarted","Data":"95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.358915 4902 generic.go:334] "Generic (PLEG): container finished" podID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerID="262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532" exitCode=0 Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.359092 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerDied","Data":"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.359171 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" event={"ID":"0bd2fab9-91aa-446a-ac26-a404f7b07991","Type":"ContainerDied","Data":"119e9b5e25efb15cc64d7a61e03dc3538c6030182d5fd27d922f4d27e84ae7c3"} Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.359227 4902 scope.go:117] "RemoveContainer" containerID="262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.359238 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-zrdjh" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.370796 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.370909 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.370972 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.371061 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.371133 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0bd2fab9-91aa-446a-ac26-a404f7b07991-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.371233 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlzrg\" (UniqueName: \"kubernetes.io/projected/0bd2fab9-91aa-446a-ac26-a404f7b07991-kube-api-access-nlzrg\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.377809 4902 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-cell1-cell-mapping-r24tk" podStartSLOduration=2.377789667 podStartE2EDuration="2.377789667s" podCreationTimestamp="2025-12-02 14:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:38:57.376217692 +0000 UTC m=+1368.567526391" watchObservedRunningTime="2025-12-02 14:38:57.377789667 +0000 UTC m=+1368.569098386" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.426021 4902 scope.go:117] "RemoveContainer" containerID="6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.445301 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.461088 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-zrdjh"] Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.484608 4902 scope.go:117] "RemoveContainer" containerID="262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532" Dec 02 14:38:57 crc kubenswrapper[4902]: E1202 14:38:57.485269 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532\": container with ID starting with 262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532 not found: ID does not exist" containerID="262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.485318 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532"} err="failed to get container status \"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532\": rpc error: code = NotFound desc = could not find container \"262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532\": container with ID starting with 262b6b6bced72ecfcaa62d4a2c35db7816b1312b1d25cdad15d0372c5fa70532 not found: ID does not exist" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.485342 4902 scope.go:117] "RemoveContainer" containerID="6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520" Dec 02 14:38:57 crc kubenswrapper[4902]: E1202 14:38:57.485582 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520\": container with ID starting with 6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520 not found: ID does not exist" containerID="6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520" Dec 02 14:38:57 crc kubenswrapper[4902]: I1202 14:38:57.485602 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520"} err="failed to get container status \"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520\": rpc error: code = NotFound desc = could not find container \"6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520\": container with ID starting with 6974c5a66986c47cd7e4317473b42a9fac7827c6bc787b13749bc441db58f520 not found: ID does not exist" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.128157 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" path="/var/lib/kubelet/pods/0bd2fab9-91aa-446a-ac26-a404f7b07991/volumes" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.387906 4902 generic.go:334] "Generic (PLEG): container finished" podID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerID="f017e04825e9a2a51478e9f3a566e47c27b5e63cb6fe1ad305a863a2d73d6407" exitCode=0 Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.387977 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerDied","Data":"f017e04825e9a2a51478e9f3a566e47c27b5e63cb6fe1ad305a863a2d73d6407"} Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.571291 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.724917 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.724983 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725083 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725169 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725230 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725255 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725339 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvhdk\" (UniqueName: \"kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725387 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs\") pod \"956e1c8e-f46e-4020-bc0e-573973ff1460\" (UID: \"956e1c8e-f46e-4020-bc0e-573973ff1460\") " Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725551 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.725652 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.726351 4902 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.726389 4902 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956e1c8e-f46e-4020-bc0e-573973ff1460-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.749620 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts" (OuterVolumeSpecName: "scripts") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.777819 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk" (OuterVolumeSpecName: "kube-api-access-mvhdk") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "kube-api-access-mvhdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.796765 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.828965 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvhdk\" (UniqueName: \"kubernetes.io/projected/956e1c8e-f46e-4020-bc0e-573973ff1460-kube-api-access-mvhdk\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.829319 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.829334 4902 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.925716 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.931075 4902 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.938540 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:38:59 crc kubenswrapper[4902]: I1202 14:38:59.956088 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data" (OuterVolumeSpecName: "config-data") pod "956e1c8e-f46e-4020-bc0e-573973ff1460" (UID: "956e1c8e-f46e-4020-bc0e-573973ff1460"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.033042 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.033077 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956e1c8e-f46e-4020-bc0e-573973ff1460-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.404844 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956e1c8e-f46e-4020-bc0e-573973ff1460","Type":"ContainerDied","Data":"66b0157bf918c1909e00436725ecdec1b4ecfcd55628fe399ee0889b4cc0d911"} Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.404891 4902 scope.go:117] "RemoveContainer" containerID="40d99fdf9f42dac5dd74788b390050f4395eb7e1c44b7113b2fc87213b3a06fa" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.405444 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.446142 4902 scope.go:117] "RemoveContainer" containerID="c301ed4680efa3ceee32ede892bd9e371f53a6f46f9be85b52013280081a69cf" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.446183 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.465386 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.476340 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477080 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="init" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477151 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="init" Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477221 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-central-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477301 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-central-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477396 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="sg-core" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477449 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="sg-core" Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477518 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-notification-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477584 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-notification-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477657 4902 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="proxy-httpd" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477708 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="proxy-httpd" Dec 02 14:39:00 crc kubenswrapper[4902]: E1202 14:39:00.477776 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="dnsmasq-dns" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.477827 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="dnsmasq-dns" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478139 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-central-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478208 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="proxy-httpd" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478266 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bd2fab9-91aa-446a-ac26-a404f7b07991" containerName="dnsmasq-dns" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478343 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="ceilometer-notification-agent" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478299 4902 scope.go:117] "RemoveContainer" containerID="2384ad9d23840b6f62b6300dbcefe409b153be4eb2c4f954b23477ebbee37b60" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.478408 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" containerName="sg-core" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.480648 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.492068 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.492286 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.492869 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.503021 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.512173 4902 scope.go:117] "RemoveContainer" containerID="f017e04825e9a2a51478e9f3a566e47c27b5e63cb6fe1ad305a863a2d73d6407" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.644541 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.644646 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.644868 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-run-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.644934 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-log-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.644976 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.645261 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-config-data\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.645346 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2xw5\" (UniqueName: \"kubernetes.io/projected/966e0415-09e4-4b17-9806-7d6e570ed19a-kube-api-access-w2xw5\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc 
kubenswrapper[4902]: I1202 14:39:00.645435 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-scripts\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747296 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747381 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747466 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-run-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747500 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-log-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747526 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747613 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-config-data\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747658 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2xw5\" (UniqueName: \"kubernetes.io/projected/966e0415-09e4-4b17-9806-7d6e570ed19a-kube-api-access-w2xw5\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.747715 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-scripts\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.748871 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-log-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc 
kubenswrapper[4902]: I1202 14:39:00.749977 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/966e0415-09e4-4b17-9806-7d6e570ed19a-run-httpd\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.753371 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.754631 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.754933 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-scripts\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.757960 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.769775 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966e0415-09e4-4b17-9806-7d6e570ed19a-config-data\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.783158 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2xw5\" (UniqueName: \"kubernetes.io/projected/966e0415-09e4-4b17-9806-7d6e570ed19a-kube-api-access-w2xw5\") pod \"ceilometer-0\" (UID: \"966e0415-09e4-4b17-9806-7d6e570ed19a\") " pod="openstack/ceilometer-0" Dec 02 14:39:00 crc kubenswrapper[4902]: I1202 14:39:00.826205 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 14:39:01 crc kubenswrapper[4902]: I1202 14:39:01.122419 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="956e1c8e-f46e-4020-bc0e-573973ff1460" path="/var/lib/kubelet/pods/956e1c8e-f46e-4020-bc0e-573973ff1460/volumes" Dec 02 14:39:01 crc kubenswrapper[4902]: I1202 14:39:01.319829 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 14:39:01 crc kubenswrapper[4902]: I1202 14:39:01.417126 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"966e0415-09e4-4b17-9806-7d6e570ed19a","Type":"ContainerStarted","Data":"7eefda0a2568a8b2ef95a8723442fd7196b63bfd0f0e5a26a922745247b166ee"} Dec 02 14:39:02 crc kubenswrapper[4902]: I1202 14:39:02.429753 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"966e0415-09e4-4b17-9806-7d6e570ed19a","Type":"ContainerStarted","Data":"e421a5001ce9b53e4e0f3b2ecddd4e4815a08aa55592f477baadff09ca29de30"} Dec 02 14:39:02 crc kubenswrapper[4902]: I1202 14:39:02.431386 4902 generic.go:334] "Generic (PLEG): container finished" podID="0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" containerID="1aadb9450c881314ebb72943ddf6f16dd075cdc525183bbeac66aacae169c39b" exitCode=0 Dec 02 14:39:02 crc kubenswrapper[4902]: I1202 14:39:02.431420 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r24tk" event={"ID":"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a","Type":"ContainerDied","Data":"1aadb9450c881314ebb72943ddf6f16dd075cdc525183bbeac66aacae169c39b"} Dec 02 14:39:03 crc kubenswrapper[4902]: I1202 14:39:03.444281 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"966e0415-09e4-4b17-9806-7d6e570ed19a","Type":"ContainerStarted","Data":"be0ee317384b7afdbdd13bf3c9eac68d9ad6436dad4e4527baddd703bfdaa0d3"} Dec 02 14:39:03 crc kubenswrapper[4902]: I1202 14:39:03.731631 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:39:03 crc kubenswrapper[4902]: I1202 14:39:03.731898 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:39:03 crc kubenswrapper[4902]: I1202 14:39:03.922756 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.019847 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle\") pod \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.019926 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data\") pod \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.020240 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d22zj\" (UniqueName: \"kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj\") pod \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.020411 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts\") pod \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\" (UID: \"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a\") " Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.027118 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts" (OuterVolumeSpecName: "scripts") pod "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" (UID: "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.043198 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj" (OuterVolumeSpecName: "kube-api-access-d22zj") pod "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" (UID: "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a"). InnerVolumeSpecName "kube-api-access-d22zj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.081272 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data" (OuterVolumeSpecName: "config-data") pod "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" (UID: "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.096726 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" (UID: "0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.122854 4902 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.123154 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.123166 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.123182 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d22zj\" (UniqueName: \"kubernetes.io/projected/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a-kube-api-access-d22zj\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.452903 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r24tk" event={"ID":"0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a","Type":"ContainerDied","Data":"95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141"} Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.452941 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95c46475f9a400275142263f819a141be20e707f038dd774680b573a195d3141" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.452950 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r24tk" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.456396 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"966e0415-09e4-4b17-9806-7d6e570ed19a","Type":"ContainerStarted","Data":"c48568fdb2363661469a9e8641f7a80c298c888740990ab8a103a6cea8481886"} Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.659994 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.660245 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-log" containerID="cri-o://4224b053cc492d3995ba4080851967da18d49cf95e9d61abe8073d81ae918241" gracePeriod=30 Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.660353 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-api" containerID="cri-o://4674402171b0796884a5c4c3dfe09579a12a035e82635e6d5d2ddd633efdc3d2" gracePeriod=30 Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.675472 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.220:8774/\": EOF" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.691902 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.692112 4902 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-scheduler-0" podUID="3cf756fd-5346-49b5-a556-f6d0b71941c9" containerName="nova-scheduler-scheduler" containerID="cri-o://0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2" gracePeriod=30 Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.699846 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.220:8774/\": EOF" Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.806169 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.806621 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" containerID="cri-o://b3d77c1e37faf51c6022b6cd28af4bff0972afe51ef8f1525fb52bff71f17c2a" gracePeriod=30 Dec 02 14:39:04 crc kubenswrapper[4902]: I1202 14:39:04.806735 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" containerID="cri-o://dd1ec77aaea611dc396284d97b0692ab6fe8606cf28968b971ee5bcd17f614d3" gracePeriod=30 Dec 02 14:39:05 crc kubenswrapper[4902]: I1202 14:39:05.467273 4902 generic.go:334] "Generic (PLEG): container finished" podID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerID="b3d77c1e37faf51c6022b6cd28af4bff0972afe51ef8f1525fb52bff71f17c2a" exitCode=143 Dec 02 14:39:05 crc kubenswrapper[4902]: I1202 14:39:05.467346 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerDied","Data":"b3d77c1e37faf51c6022b6cd28af4bff0972afe51ef8f1525fb52bff71f17c2a"} Dec 02 14:39:05 crc kubenswrapper[4902]: I1202 14:39:05.470624 4902 generic.go:334] "Generic (PLEG): container finished" podID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerID="4224b053cc492d3995ba4080851967da18d49cf95e9d61abe8073d81ae918241" exitCode=143 Dec 02 14:39:05 crc kubenswrapper[4902]: I1202 14:39:05.470670 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerDied","Data":"4224b053cc492d3995ba4080851967da18d49cf95e9d61abe8073d81ae918241"} Dec 02 14:39:06 crc kubenswrapper[4902]: I1202 14:39:06.484309 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"966e0415-09e4-4b17-9806-7d6e570ed19a","Type":"ContainerStarted","Data":"f34fd83e7eb5c9ba2e17db8a66b09deeb0e9338228eb02a05cf4a42ed247b160"} Dec 02 14:39:06 crc kubenswrapper[4902]: I1202 14:39:06.484771 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 14:39:06 crc kubenswrapper[4902]: I1202 14:39:06.514720 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.001932461 podStartE2EDuration="6.514698272s" podCreationTimestamp="2025-12-02 14:39:00 +0000 UTC" firstStartedPulling="2025-12-02 14:39:01.311999621 +0000 UTC m=+1372.503308330" lastFinishedPulling="2025-12-02 14:39:05.824765432 +0000 UTC m=+1377.016074141" observedRunningTime="2025-12-02 14:39:06.511007238 +0000 UTC m=+1377.702315947" watchObservedRunningTime="2025-12-02 14:39:06.514698272 +0000 UTC m=+1377.706006981" 
Dec 02 14:39:06 crc kubenswrapper[4902]: I1202 14:39:06.924120 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.079400 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8dwh\" (UniqueName: \"kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh\") pod \"3cf756fd-5346-49b5-a556-f6d0b71941c9\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.079470 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle\") pod \"3cf756fd-5346-49b5-a556-f6d0b71941c9\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.079509 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data\") pod \"3cf756fd-5346-49b5-a556-f6d0b71941c9\" (UID: \"3cf756fd-5346-49b5-a556-f6d0b71941c9\") " Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.086974 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh" (OuterVolumeSpecName: "kube-api-access-h8dwh") pod "3cf756fd-5346-49b5-a556-f6d0b71941c9" (UID: "3cf756fd-5346-49b5-a556-f6d0b71941c9"). InnerVolumeSpecName "kube-api-access-h8dwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.111139 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3cf756fd-5346-49b5-a556-f6d0b71941c9" (UID: "3cf756fd-5346-49b5-a556-f6d0b71941c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.114089 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data" (OuterVolumeSpecName: "config-data") pod "3cf756fd-5346-49b5-a556-f6d0b71941c9" (UID: "3cf756fd-5346-49b5-a556-f6d0b71941c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.182605 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8dwh\" (UniqueName: \"kubernetes.io/projected/3cf756fd-5346-49b5-a556-f6d0b71941c9-kube-api-access-h8dwh\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.182652 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.182664 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf756fd-5346-49b5-a556-f6d0b71941c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.498166 4902 generic.go:334] "Generic (PLEG): container finished" podID="3cf756fd-5346-49b5-a556-f6d0b71941c9" containerID="0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2" exitCode=0 Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.498221 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3cf756fd-5346-49b5-a556-f6d0b71941c9","Type":"ContainerDied","Data":"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2"} Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.498283 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3cf756fd-5346-49b5-a556-f6d0b71941c9","Type":"ContainerDied","Data":"8a40149cd27214d72f07ca5d62d9da91eadbbb9ba2ec2f8d23dc7906eafc9301"} Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.498315 4902 scope.go:117] "RemoveContainer" containerID="0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.498235 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.528324 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.530023 4902 scope.go:117] "RemoveContainer" containerID="0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2" Dec 02 14:39:07 crc kubenswrapper[4902]: E1202 14:39:07.530754 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2\": container with ID starting with 0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2 not found: ID does not exist" containerID="0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.530807 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2"} err="failed to get container status \"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2\": rpc error: code = NotFound desc = could not find container \"0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2\": container with ID starting with 0a176451574e2c20596be153a27568e29d0ca5e033c9cfc487c6578816e6e3a2 not found: ID does not exist" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.540069 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.556359 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:07 crc kubenswrapper[4902]: E1202 14:39:07.556778 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cf756fd-5346-49b5-a556-f6d0b71941c9" containerName="nova-scheduler-scheduler" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.556803 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cf756fd-5346-49b5-a556-f6d0b71941c9" containerName="nova-scheduler-scheduler" Dec 02 14:39:07 crc kubenswrapper[4902]: E1202 14:39:07.556838 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" containerName="nova-manage" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.556847 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" containerName="nova-manage" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.557064 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cf756fd-5346-49b5-a556-f6d0b71941c9" containerName="nova-scheduler-scheduler" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.557097 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" containerName="nova-manage" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.564184 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.567226 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.585993 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.692390 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck6dr\" (UniqueName: \"kubernetes.io/projected/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-kube-api-access-ck6dr\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.692629 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-config-data\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.692731 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.794370 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-config-data\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.794448 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.794603 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck6dr\" (UniqueName: \"kubernetes.io/projected/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-kube-api-access-ck6dr\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.799942 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-config-data\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.801190 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.814492 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck6dr\" (UniqueName: 
\"kubernetes.io/projected/10a0095a-bf9d-45ec-9b8f-1b1543ed641c-kube-api-access-ck6dr\") pod \"nova-scheduler-0\" (UID: \"10a0095a-bf9d-45ec-9b8f-1b1543ed641c\") " pod="openstack/nova-scheduler-0" Dec 02 14:39:07 crc kubenswrapper[4902]: I1202 14:39:07.933454 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 14:39:08 crc kubenswrapper[4902]: I1202 14:39:08.010165 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": read tcp 10.217.0.2:45958->10.217.0.212:8775: read: connection reset by peer" Dec 02 14:39:08 crc kubenswrapper[4902]: I1202 14:39:08.010183 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": read tcp 10.217.0.2:45966->10.217.0.212:8775: read: connection reset by peer" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.523830 4902 generic.go:334] "Generic (PLEG): container finished" podID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerID="dd1ec77aaea611dc396284d97b0692ab6fe8606cf28968b971ee5bcd17f614d3" exitCode=0 Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.523866 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerDied","Data":"dd1ec77aaea611dc396284d97b0692ab6fe8606cf28968b971ee5bcd17f614d3"} Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.565903 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 14:39:09 crc kubenswrapper[4902]: W1202 14:39:08.594404 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10a0095a_bf9d_45ec_9b8f_1b1543ed641c.slice/crio-bbc9e00edd414fbc1cf1b875a072c789c68e2912e4fa9f7c520b21394811e6f0 WatchSource:0}: Error finding container bbc9e00edd414fbc1cf1b875a072c789c68e2912e4fa9f7c520b21394811e6f0: Status 404 returned error can't find the container with id bbc9e00edd414fbc1cf1b875a072c789c68e2912e4fa9f7c520b21394811e6f0 Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.726228 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823163 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7flj\" (UniqueName: \"kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj\") pod \"d81580a2-c346-4315-aee2-d6346b5c55d0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823206 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs\") pod \"d81580a2-c346-4315-aee2-d6346b5c55d0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823255 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs\") pod \"d81580a2-c346-4315-aee2-d6346b5c55d0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823361 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data\") pod \"d81580a2-c346-4315-aee2-d6346b5c55d0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823397 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle\") pod \"d81580a2-c346-4315-aee2-d6346b5c55d0\" (UID: \"d81580a2-c346-4315-aee2-d6346b5c55d0\") " Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.823910 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs" (OuterVolumeSpecName: "logs") pod "d81580a2-c346-4315-aee2-d6346b5c55d0" (UID: "d81580a2-c346-4315-aee2-d6346b5c55d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.828527 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj" (OuterVolumeSpecName: "kube-api-access-d7flj") pod "d81580a2-c346-4315-aee2-d6346b5c55d0" (UID: "d81580a2-c346-4315-aee2-d6346b5c55d0"). InnerVolumeSpecName "kube-api-access-d7flj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.912195 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d81580a2-c346-4315-aee2-d6346b5c55d0" (UID: "d81580a2-c346-4315-aee2-d6346b5c55d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.923905 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data" (OuterVolumeSpecName: "config-data") pod "d81580a2-c346-4315-aee2-d6346b5c55d0" (UID: "d81580a2-c346-4315-aee2-d6346b5c55d0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.927310 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7flj\" (UniqueName: \"kubernetes.io/projected/d81580a2-c346-4315-aee2-d6346b5c55d0-kube-api-access-d7flj\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.927344 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d81580a2-c346-4315-aee2-d6346b5c55d0-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.927359 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.927372 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:08.940524 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d81580a2-c346-4315-aee2-d6346b5c55d0" (UID: "d81580a2-c346-4315-aee2-d6346b5c55d0"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.029303 4902 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d81580a2-c346-4315-aee2-d6346b5c55d0-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.141178 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cf756fd-5346-49b5-a556-f6d0b71941c9" path="/var/lib/kubelet/pods/3cf756fd-5346-49b5-a556-f6d0b71941c9/volumes" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.541079 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10a0095a-bf9d-45ec-9b8f-1b1543ed641c","Type":"ContainerStarted","Data":"8789f7f5be628a15f6aead227941fdd522be69a0ca1e6de27bde53cba8456407"} Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.541163 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10a0095a-bf9d-45ec-9b8f-1b1543ed641c","Type":"ContainerStarted","Data":"bbc9e00edd414fbc1cf1b875a072c789c68e2912e4fa9f7c520b21394811e6f0"} Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.545039 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d81580a2-c346-4315-aee2-d6346b5c55d0","Type":"ContainerDied","Data":"8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48"} Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.545145 4902 scope.go:117] "RemoveContainer" containerID="dd1ec77aaea611dc396284d97b0692ab6fe8606cf28968b971ee5bcd17f614d3" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.545539 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.574636 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.574607105 podStartE2EDuration="2.574607105s" podCreationTimestamp="2025-12-02 14:39:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:39:09.57409087 +0000 UTC m=+1380.765399619" watchObservedRunningTime="2025-12-02 14:39:09.574607105 +0000 UTC m=+1380.765915854" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.587078 4902 scope.go:117] "RemoveContainer" containerID="b3d77c1e37faf51c6022b6cd28af4bff0972afe51ef8f1525fb52bff71f17c2a" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.603996 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.615496 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.628421 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:09 crc kubenswrapper[4902]: E1202 14:39:09.628916 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.628939 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" Dec 02 14:39:09 crc kubenswrapper[4902]: E1202 14:39:09.628960 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.628971 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.629254 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-log" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.629284 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" containerName="nova-metadata-metadata" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.630607 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.636620 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.637900 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.661328 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.746703 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.746764 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50eb495f-ceaa-4583-b3d6-42ba67a92160-logs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.746808 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-config-data\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.746850 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx5b5\" (UniqueName: \"kubernetes.io/projected/50eb495f-ceaa-4583-b3d6-42ba67a92160-kube-api-access-fx5b5\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.746878 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.862431 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-config-data\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.862540 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx5b5\" (UniqueName: \"kubernetes.io/projected/50eb495f-ceaa-4583-b3d6-42ba67a92160-kube-api-access-fx5b5\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.866876 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " 
pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.869395 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.869515 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50eb495f-ceaa-4583-b3d6-42ba67a92160-logs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.869599 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-config-data\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.869849 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50eb495f-ceaa-4583-b3d6-42ba67a92160-logs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.870373 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.881916 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50eb495f-ceaa-4583-b3d6-42ba67a92160-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.908429 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx5b5\" (UniqueName: \"kubernetes.io/projected/50eb495f-ceaa-4583-b3d6-42ba67a92160-kube-api-access-fx5b5\") pod \"nova-metadata-0\" (UID: \"50eb495f-ceaa-4583-b3d6-42ba67a92160\") " pod="openstack/nova-metadata-0" Dec 02 14:39:09 crc kubenswrapper[4902]: I1202 14:39:09.966704 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 14:39:10 crc kubenswrapper[4902]: W1202 14:39:10.519768 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50eb495f_ceaa_4583_b3d6_42ba67a92160.slice/crio-d86ea6ee5964d91c8a2c10d9066e46d308d76a279c23cca4c40832f848250252 WatchSource:0}: Error finding container d86ea6ee5964d91c8a2c10d9066e46d308d76a279c23cca4c40832f848250252: Status 404 returned error can't find the container with id d86ea6ee5964d91c8a2c10d9066e46d308d76a279c23cca4c40832f848250252 Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.526041 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.557268 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50eb495f-ceaa-4583-b3d6-42ba67a92160","Type":"ContainerStarted","Data":"d86ea6ee5964d91c8a2c10d9066e46d308d76a279c23cca4c40832f848250252"} Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.564673 4902 generic.go:334] "Generic (PLEG): container finished" podID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerID="4674402171b0796884a5c4c3dfe09579a12a035e82635e6d5d2ddd633efdc3d2" exitCode=0 Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.564762 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerDied","Data":"4674402171b0796884a5c4c3dfe09579a12a035e82635e6d5d2ddd633efdc3d2"} Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.675913 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787145 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787361 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787447 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xwk6\" (UniqueName: \"kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787614 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787828 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: 
\"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.787977 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs\") pod \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\" (UID: \"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b\") " Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.789033 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs" (OuterVolumeSpecName: "logs") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.793711 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6" (OuterVolumeSpecName: "kube-api-access-7xwk6") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "kube-api-access-7xwk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.838316 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.867287 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.872761 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.881356 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data" (OuterVolumeSpecName: "config-data") pod "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" (UID: "c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890382 4902 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-logs\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890417 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890433 4902 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890455 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xwk6\" (UniqueName: \"kubernetes.io/projected/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-kube-api-access-7xwk6\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890468 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:10 crc kubenswrapper[4902]: I1202 14:39:10.890479 4902 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.133922 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d81580a2-c346-4315-aee2-d6346b5c55d0" path="/var/lib/kubelet/pods/d81580a2-c346-4315-aee2-d6346b5c55d0/volumes" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.575529 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b","Type":"ContainerDied","Data":"23c4d8144da270445dcc568fde5b6755196e19d5191d05916195a2d665c81dab"} Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.575539 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.575904 4902 scope.go:117] "RemoveContainer" containerID="4674402171b0796884a5c4c3dfe09579a12a035e82635e6d5d2ddd633efdc3d2" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.577253 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50eb495f-ceaa-4583-b3d6-42ba67a92160","Type":"ContainerStarted","Data":"9fe26d7e012c3045468f17c47f961efbad67dd43fb6029a0cf06466a3d4264d5"} Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.577363 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50eb495f-ceaa-4583-b3d6-42ba67a92160","Type":"ContainerStarted","Data":"83ca77474c10d36e8d95abb00df0fa0c57b57761d91529af83e6799b75a1ddcc"} Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.598319 4902 scope.go:117] "RemoveContainer" containerID="4224b053cc492d3995ba4080851967da18d49cf95e9d61abe8073d81ae918241" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.607941 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.607917953 podStartE2EDuration="2.607917953s" podCreationTimestamp="2025-12-02 14:39:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:39:11.59898431 +0000 UTC m=+1382.790293019" watchObservedRunningTime="2025-12-02 14:39:11.607917953 +0000 UTC m=+1382.799226662" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.632476 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.665115 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.681904 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:11 crc kubenswrapper[4902]: E1202 14:39:11.682278 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-log" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.682293 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-log" Dec 02 14:39:11 crc kubenswrapper[4902]: E1202 14:39:11.682332 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-api" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.682344 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-api" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.682573 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-log" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.682595 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" containerName="nova-api-api" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.684987 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.685096 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.687105 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.691772 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.706131 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816727 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sbm4\" (UniqueName: \"kubernetes.io/projected/c8412330-d9d3-42bd-be8a-212966221fda-kube-api-access-6sbm4\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816855 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816875 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816896 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8412330-d9d3-42bd-be8a-212966221fda-logs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816931 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-public-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.816991 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-config-data\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.918898 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-config-data\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.918984 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sbm4\" (UniqueName: \"kubernetes.io/projected/c8412330-d9d3-42bd-be8a-212966221fda-kube-api-access-6sbm4\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.919090 
4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.919111 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.919139 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8412330-d9d3-42bd-be8a-212966221fda-logs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.919178 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-public-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.920891 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8412330-d9d3-42bd-be8a-212966221fda-logs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.929399 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-config-data\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.934178 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.941296 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sbm4\" (UniqueName: \"kubernetes.io/projected/c8412330-d9d3-42bd-be8a-212966221fda-kube-api-access-6sbm4\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.943173 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:11 crc kubenswrapper[4902]: I1202 14:39:11.950273 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8412330-d9d3-42bd-be8a-212966221fda-public-tls-certs\") pod \"nova-api-0\" (UID: \"c8412330-d9d3-42bd-be8a-212966221fda\") " pod="openstack/nova-api-0" Dec 02 14:39:12 crc kubenswrapper[4902]: I1202 14:39:12.027024 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 14:39:12 crc kubenswrapper[4902]: I1202 14:39:12.487015 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 14:39:12 crc kubenswrapper[4902]: I1202 14:39:12.590083 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c8412330-d9d3-42bd-be8a-212966221fda","Type":"ContainerStarted","Data":"6fa3b27e218c56e13d7a478f5c080d8c507a4e3ebf9457eeb07ff38a4cc6429b"} Dec 02 14:39:12 crc kubenswrapper[4902]: I1202 14:39:12.933638 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 14:39:13 crc kubenswrapper[4902]: I1202 14:39:13.122891 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b" path="/var/lib/kubelet/pods/c5a0b0b8-26ef-443d-9ff9-96ff84f1a74b/volumes" Dec 02 14:39:13 crc kubenswrapper[4902]: I1202 14:39:13.602505 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c8412330-d9d3-42bd-be8a-212966221fda","Type":"ContainerStarted","Data":"660016ee74f16f77672917e77636db2a8b11087ec05455b4d4742483a0c4ece6"} Dec 02 14:39:13 crc kubenswrapper[4902]: I1202 14:39:13.602834 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c8412330-d9d3-42bd-be8a-212966221fda","Type":"ContainerStarted","Data":"02112d985ff8276d3597cae8e04180e693a9de7e9e592a650cc9b3e5840ab362"} Dec 02 14:39:14 crc kubenswrapper[4902]: E1202 14:39:14.507345 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache]" Dec 02 14:39:14 crc kubenswrapper[4902]: I1202 14:39:14.967160 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 14:39:14 crc kubenswrapper[4902]: I1202 14:39:14.967299 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 14:39:17 crc kubenswrapper[4902]: I1202 14:39:17.933687 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 14:39:17 crc kubenswrapper[4902]: I1202 14:39:17.961487 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 14:39:17 crc kubenswrapper[4902]: I1202 14:39:17.993772 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=6.993753213 podStartE2EDuration="6.993753213s" podCreationTimestamp="2025-12-02 14:39:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:39:13.621554503 +0000 UTC m=+1384.812863232" watchObservedRunningTime="2025-12-02 14:39:17.993753213 +0000 UTC m=+1389.185061922" Dec 02 14:39:18 crc kubenswrapper[4902]: I1202 14:39:18.697906 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 14:39:19 crc kubenswrapper[4902]: I1202 14:39:19.967467 4902 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 14:39:19 crc kubenswrapper[4902]: I1202 14:39:19.968039 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 14:39:20 crc kubenswrapper[4902]: I1202 14:39:20.977713 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50eb495f-ceaa-4583-b3d6-42ba67a92160" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:39:20 crc kubenswrapper[4902]: I1202 14:39:20.977730 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50eb495f-ceaa-4583-b3d6-42ba67a92160" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.224:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:39:22 crc kubenswrapper[4902]: I1202 14:39:22.028396 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:39:22 crc kubenswrapper[4902]: I1202 14:39:22.030659 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 14:39:23 crc kubenswrapper[4902]: I1202 14:39:23.050710 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c8412330-d9d3-42bd-be8a-212966221fda" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.225:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:39:23 crc kubenswrapper[4902]: I1202 14:39:23.050748 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c8412330-d9d3-42bd-be8a-212966221fda" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.225:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 14:39:24 crc kubenswrapper[4902]: E1202 14:39:24.803321 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache]" Dec 02 14:39:29 crc kubenswrapper[4902]: I1202 14:39:29.972906 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 14:39:29 crc kubenswrapper[4902]: I1202 14:39:29.982769 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 14:39:29 crc kubenswrapper[4902]: I1202 14:39:29.983987 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 14:39:30 crc kubenswrapper[4902]: I1202 14:39:30.801842 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 14:39:30 crc kubenswrapper[4902]: I1202 14:39:30.870314 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.038621 4902 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.039290 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.042790 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.044689 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.824061 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 14:39:32 crc kubenswrapper[4902]: I1202 14:39:32.856498 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 14:39:35 crc kubenswrapper[4902]: E1202 14:39:35.117204 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache]" Dec 02 14:39:41 crc kubenswrapper[4902]: I1202 14:39:41.223211 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:39:42 crc kubenswrapper[4902]: I1202 14:39:42.059385 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 14:39:45 crc kubenswrapper[4902]: E1202 14:39:45.417473 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache]" Dec 02 14:39:45 crc kubenswrapper[4902]: I1202 14:39:45.541295 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="rabbitmq" containerID="cri-o://ca53563e8c7d21f932c5be2e48a675fdbfe3f262affc86b096bc5bd76d899a60" gracePeriod=604796 Dec 02 14:39:46 crc kubenswrapper[4902]: I1202 14:39:46.163656 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="rabbitmq" containerID="cri-o://7f8066199d6c9ba8091ba87e8a8bc33ffd01c44c33e11cf954e0fcc96793aae4" gracePeriod=604796 Dec 02 14:39:50 crc kubenswrapper[4902]: I1202 14:39:50.355147 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused" Dec 02 14:39:50 crc kubenswrapper[4902]: I1202 14:39:50.492362 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" 
podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.080157 4902 generic.go:334] "Generic (PLEG): container finished" podID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerID="ca53563e8c7d21f932c5be2e48a675fdbfe3f262affc86b096bc5bd76d899a60" exitCode=0 Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.080482 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerDied","Data":"ca53563e8c7d21f932c5be2e48a675fdbfe3f262affc86b096bc5bd76d899a60"} Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.480249 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.671973 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672031 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672082 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672101 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672128 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqdlm\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672150 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672358 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672435 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" 
(UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672474 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672508 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.672548 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"abeb5bdf-6307-4dc9-9a97-d638817b544c\" (UID: \"abeb5bdf-6307-4dc9-9a97-d638817b544c\") " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.809401 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.809981 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.810253 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data" (OuterVolumeSpecName: "config-data") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.810992 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.810983 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.811855 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf" (OuterVolumeSpecName: "server-conf") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.813114 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm" (OuterVolumeSpecName: "kube-api-access-cqdlm") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "kube-api-access-cqdlm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.813929 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.814203 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info" (OuterVolumeSpecName: "pod-info") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.823507 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877516 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877553 4902 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abeb5bdf-6307-4dc9-9a97-d638817b544c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877677 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877691 4902 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877704 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqdlm\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-kube-api-access-cqdlm\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877717 4902 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abeb5bdf-6307-4dc9-9a97-d638817b544c-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877728 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877739 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877751 4902 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abeb5bdf-6307-4dc9-9a97-d638817b544c-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.877763 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.926990 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.954313 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "abeb5bdf-6307-4dc9-9a97-d638817b544c" (UID: "abeb5bdf-6307-4dc9-9a97-d638817b544c"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.979196 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abeb5bdf-6307-4dc9-9a97-d638817b544c-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:52 crc kubenswrapper[4902]: I1202 14:39:52.979228 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.095774 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abeb5bdf-6307-4dc9-9a97-d638817b544c","Type":"ContainerDied","Data":"142648d21c22f775b177d6586f42e10299b7cc92d330f6a129a3472011a9bfea"} Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.095835 4902 scope.go:117] "RemoveContainer" containerID="ca53563e8c7d21f932c5be2e48a675fdbfe3f262affc86b096bc5bd76d899a60" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.095838 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.100009 4902 generic.go:334] "Generic (PLEG): container finished" podID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerID="7f8066199d6c9ba8091ba87e8a8bc33ffd01c44c33e11cf954e0fcc96793aae4" exitCode=0 Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.100050 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerDied","Data":"7f8066199d6c9ba8091ba87e8a8bc33ffd01c44c33e11cf954e0fcc96793aae4"} Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.123447 4902 scope.go:117] "RemoveContainer" containerID="3033e15d7080893bfc76fe29831d27b48eb5732d47a6cbcd386f840438b6121c" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.137912 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.164759 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.171464 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 14:39:53 crc kubenswrapper[4902]: E1202 14:39:53.208229 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="rabbitmq" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.208264 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="rabbitmq" Dec 02 14:39:53 crc kubenswrapper[4902]: E1202 14:39:53.208276 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="setup-container" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.208284 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="setup-container" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.208542 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" containerName="rabbitmq" Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.210220 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] 
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.210337 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.213754 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.214035 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.214268 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gfvhz"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.214383 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.216492 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.216641 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.216697 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398376 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6x8w\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-kube-api-access-v6x8w\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398755 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398786 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398833 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-config-data\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398849 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398864 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398901 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.398924 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.399018 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.399036 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.399069 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501418 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501464 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501584 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-config-data\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501605 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501621 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501655 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501674 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501749 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501768 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501798 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.501816 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6x8w\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-kube-api-access-v6x8w\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.503105 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.503922 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.504793 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.505062 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-config-data\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.505137 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.505410 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.511130 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.511843 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.511990 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.512083 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.519224 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.525041 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6x8w\" (UniqueName: \"kubernetes.io/projected/6e03bb87-25fa-48ee-8e1a-20309d4d3a4b-kube-api-access-v6x8w\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.547356 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b\") " pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706700 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706747 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706775 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706831 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706884 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.706976 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2n6d\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.707024 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.707086 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.707126 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.707158 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.707506 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd\") pod \"4c350efc-48d5-4e5d-acee-399252e1b24a\" (UID: \"4c350efc-48d5-4e5d-acee-399252e1b24a\") "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.708185 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.709518 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.709697 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.711552 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.712116 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.712231 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.713019 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info" (OuterVolumeSpecName: "pod-info") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.713925 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d" (OuterVolumeSpecName: "kube-api-access-x2n6d") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "kube-api-access-x2n6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.741055 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data" (OuterVolumeSpecName: "config-data") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.760665 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf" (OuterVolumeSpecName: "server-conf") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.807001 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4c350efc-48d5-4e5d-acee-399252e1b24a" (UID: "4c350efc-48d5-4e5d-acee-399252e1b24a"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810269 4902 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4c350efc-48d5-4e5d-acee-399252e1b24a-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810302 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810316 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810329 4902 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810341 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810378 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810391 4902 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4c350efc-48d5-4e5d-acee-399252e1b24a-server-conf\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810403 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2n6d\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-kube-api-access-x2n6d\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810417 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810430 4902 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4c350efc-48d5-4e5d-acee-399252e1b24a-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.810441 4902 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4c350efc-48d5-4e5d-acee-399252e1b24a-pod-info\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.837932 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.844989 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 02 14:39:53 crc kubenswrapper[4902]: I1202 14:39:53.914506 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.115868 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4c350efc-48d5-4e5d-acee-399252e1b24a","Type":"ContainerDied","Data":"6f78c0777c78e0d0f8f965a43c346776c8ba3c732b5fed7d15462a83bf7ca25e"}
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.116209 4902 scope.go:117] "RemoveContainer" containerID="7f8066199d6c9ba8091ba87e8a8bc33ffd01c44c33e11cf954e0fcc96793aae4"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.115916 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.154589 4902 scope.go:117] "RemoveContainer" containerID="3e655273fe43d2b060992df0598b8087baceb35e6ab8d009013b8020e6022672"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.165199 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.179206 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.189973 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 02 14:39:54 crc kubenswrapper[4902]: E1202 14:39:54.190371 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="rabbitmq"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.190386 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="rabbitmq"
Dec 02 14:39:54 crc kubenswrapper[4902]: E1202 14:39:54.190399 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="setup-container"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.190405 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="setup-container"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.190611 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" containerName="rabbitmq"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.192034 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196129 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196220 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196337 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196350 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196368 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7gl6j"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196494 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.196748 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.211177 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.325388 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327726 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327841 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327879 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1640e5ea-d80e-4302-9690-5ca9efaa9879-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327909 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327943 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.327974 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1640e5ea-d80e-4302-9690-5ca9efaa9879-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.328016 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.329471 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.329508 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.329536 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.329553 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-854lm\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-kube-api-access-854lm\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431135 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431293 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431327 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431355 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431376 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-854lm\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-kube-api-access-854lm\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431406 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431478 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431510 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1640e5ea-d80e-4302-9690-5ca9efaa9879-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431533 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431593 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.431620 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1640e5ea-d80e-4302-9690-5ca9efaa9879-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.433038 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.433433 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.434004 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1640e5ea-d80e-4302-9690-5ca9efaa9879-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.434483 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.435309 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.436128 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.436133 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.439259 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1640e5ea-d80e-4302-9690-5ca9efaa9879-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.439478 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1640e5ea-d80e-4302-9690-5ca9efaa9879-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.439490 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1640e5ea-d80e-4302-9690-5ca9efaa9879-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.452105 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-854lm\" (UniqueName: \"kubernetes.io/projected/1640e5ea-d80e-4302-9690-5ca9efaa9879-kube-api-access-854lm\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.495002 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1640e5ea-d80e-4302-9690-5ca9efaa9879\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.517872 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.836452 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.838498 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844394 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844449 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr7fz\" (UniqueName: \"kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844525 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844592 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844637 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844689 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.844709 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.875237 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.883802 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947227 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947282 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947331 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947383 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947403 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947435 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.947462 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr7fz\" (UniqueName: \"kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.948606 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.948734 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.948799 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.948796 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.948833 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.949046 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:54 crc kubenswrapper[4902]: I1202 14:39:54.965936 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr7fz\" (UniqueName: \"kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz\") pod \"dnsmasq-dns-79bd4cc8c9-trdt2\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:55 crc kubenswrapper[4902]: W1202 14:39:55.046759 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1640e5ea_d80e_4302_9690_5ca9efaa9879.slice/crio-8d30a7a9e6a6f6bab750f68a9a9c2d16d030a2e177edecbe14c3e8dee7281bca WatchSource:0}: Error finding container 8d30a7a9e6a6f6bab750f68a9a9c2d16d030a2e177edecbe14c3e8dee7281bca: Status 404 returned error can't find the container with id 8d30a7a9e6a6f6bab750f68a9a9c2d16d030a2e177edecbe14c3e8dee7281bca Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.051185 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.118088 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c350efc-48d5-4e5d-acee-399252e1b24a" path="/var/lib/kubelet/pods/4c350efc-48d5-4e5d-acee-399252e1b24a/volumes" Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.119336 4902 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="abeb5bdf-6307-4dc9-9a97-d638817b544c" path="/var/lib/kubelet/pods/abeb5bdf-6307-4dc9-9a97-d638817b544c/volumes" Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.136716 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b","Type":"ContainerStarted","Data":"ac6e2c4d3dcceea9d2c68157a534aca78eb3284d2426cc0699c0541cc56ca5a6"} Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.137573 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1640e5ea-d80e-4302-9690-5ca9efaa9879","Type":"ContainerStarted","Data":"8d30a7a9e6a6f6bab750f68a9a9c2d16d030a2e177edecbe14c3e8dee7281bca"} Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.239234 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:55 crc kubenswrapper[4902]: E1202 14:39:55.721618 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache]" Dec 02 14:39:55 crc kubenswrapper[4902]: I1202 14:39:55.776319 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:39:55 crc kubenswrapper[4902]: W1202 14:39:55.778307 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2413827d_80bc_4936_8793_50d647b24d11.slice/crio-767477bd280d052827dab346a62929f25dfd7911299043a89844e0b2002906dc WatchSource:0}: Error finding container 767477bd280d052827dab346a62929f25dfd7911299043a89844e0b2002906dc: Status 404 returned error can't find the container with id 767477bd280d052827dab346a62929f25dfd7911299043a89844e0b2002906dc Dec 02 14:39:56 crc kubenswrapper[4902]: I1202 14:39:56.158920 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b","Type":"ContainerStarted","Data":"8bd40e984d6a8b543fffbd9b583040b76e9b23ba51d7532821c88d0ea3999ad8"} Dec 02 14:39:56 crc kubenswrapper[4902]: I1202 14:39:56.162121 4902 generic.go:334] "Generic (PLEG): container finished" podID="2413827d-80bc-4936-8793-50d647b24d11" containerID="2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3" exitCode=0 Dec 02 14:39:56 crc kubenswrapper[4902]: I1202 14:39:56.162182 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" event={"ID":"2413827d-80bc-4936-8793-50d647b24d11","Type":"ContainerDied","Data":"2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3"} Dec 02 14:39:56 crc kubenswrapper[4902]: I1202 14:39:56.162238 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" event={"ID":"2413827d-80bc-4936-8793-50d647b24d11","Type":"ContainerStarted","Data":"767477bd280d052827dab346a62929f25dfd7911299043a89844e0b2002906dc"} Dec 02 14:39:57 crc kubenswrapper[4902]: I1202 14:39:57.174367 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" event={"ID":"2413827d-80bc-4936-8793-50d647b24d11","Type":"ContainerStarted","Data":"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4"} Dec 02 14:39:57 crc kubenswrapper[4902]: I1202 14:39:57.174994 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:39:57 crc kubenswrapper[4902]: I1202 14:39:57.217124 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" podStartSLOduration=3.217097108 podStartE2EDuration="3.217097108s" podCreationTimestamp="2025-12-02 14:39:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:39:57.197878414 +0000 UTC m=+1428.389187123" watchObservedRunningTime="2025-12-02 14:39:57.217097108 +0000 UTC m=+1428.408405837" Dec 02 14:39:58 crc kubenswrapper[4902]: I1202 14:39:58.189959 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1640e5ea-d80e-4302-9690-5ca9efaa9879","Type":"ContainerStarted","Data":"7aa51f2e6f516583a2e3fc459579f78e686c42e28614cf1244be7f384308d265"} Dec 02 14:39:59 crc kubenswrapper[4902]: I1202 14:39:59.996950 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:39:59 crc kubenswrapper[4902]: I1202 14:39:59.999864 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.030867 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.153131 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8pz5\" (UniqueName: \"kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.153277 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.153339 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.256264 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8pz5\" (UniqueName: \"kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.256400 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.256441 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.256997 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.257405 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.279112 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8pz5\" (UniqueName: \"kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5\") pod \"redhat-operators-dm229\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.350172 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:00 crc kubenswrapper[4902]: I1202 14:40:00.838131 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:40:00 crc kubenswrapper[4902]: W1202 14:40:00.851804 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeeafdc9c_6dad_4084_a34d_497f7d8aa00f.slice/crio-f3b2a95f271c3ce5887dfd5d7dc7bd4477a6bd7db2367836553b5a6c5504e323 WatchSource:0}: Error finding container f3b2a95f271c3ce5887dfd5d7dc7bd4477a6bd7db2367836553b5a6c5504e323: Status 404 returned error can't find the container with id f3b2a95f271c3ce5887dfd5d7dc7bd4477a6bd7db2367836553b5a6c5504e323 Dec 02 14:40:01 crc kubenswrapper[4902]: I1202 14:40:01.233615 4902 generic.go:334] "Generic (PLEG): container finished" podID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerID="0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774" exitCode=0 Dec 02 14:40:01 crc kubenswrapper[4902]: I1202 14:40:01.233782 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerDied","Data":"0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774"} Dec 02 14:40:01 crc kubenswrapper[4902]: I1202 14:40:01.233890 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerStarted","Data":"f3b2a95f271c3ce5887dfd5d7dc7bd4477a6bd7db2367836553b5a6c5504e323"} Dec 02 14:40:03 crc kubenswrapper[4902]: I1202 14:40:03.266952 4902 generic.go:334] "Generic (PLEG): container finished" podID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerID="f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4" exitCode=0 Dec 02 14:40:03 crc kubenswrapper[4902]: I1202 14:40:03.266993 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerDied","Data":"f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4"} Dec 02 14:40:04 crc kubenswrapper[4902]: I1202 14:40:04.280227 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerStarted","Data":"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b"} Dec 02 14:40:04 crc kubenswrapper[4902]: I1202 14:40:04.298957 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dm229" podStartSLOduration=2.526842895 podStartE2EDuration="5.298937293s" podCreationTimestamp="2025-12-02 14:39:59 +0000 UTC" firstStartedPulling="2025-12-02 14:40:01.23535363 +0000 UTC m=+1432.426662339" lastFinishedPulling="2025-12-02 14:40:04.007447988 +0000 UTC m=+1435.198756737" observedRunningTime="2025-12-02 14:40:04.294692583 +0000 UTC m=+1435.486001302" watchObservedRunningTime="2025-12-02 14:40:04.298937293 +0000 UTC m=+1435.490246002" Dec 02 14:40:04 crc kubenswrapper[4902]: I1202 14:40:04.731730 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 02 14:40:04 crc kubenswrapper[4902]: I1202 14:40:04.731794 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.241772 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.391320 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.391678 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="dnsmasq-dns" containerID="cri-o://40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24" gracePeriod=10 Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.524865 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-84qbs"] Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.529198 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.553666 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-84qbs"] Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716271 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-config\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716318 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm5zl\" (UniqueName: \"kubernetes.io/projected/a97892a2-52af-4c1b-9db8-be3b7522774d-kube-api-access-qm5zl\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716344 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716378 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716673 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: 
\"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716761 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.716907 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817487 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm5zl\" (UniqueName: \"kubernetes.io/projected/a97892a2-52af-4c1b-9db8-be3b7522774d-kube-api-access-qm5zl\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817535 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-config\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817573 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817612 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817674 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817703 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.817764 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: 
\"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.818701 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.821145 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-config\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.822388 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.822975 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.823501 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.824004 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a97892a2-52af-4c1b-9db8-be3b7522774d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.869481 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm5zl\" (UniqueName: \"kubernetes.io/projected/a97892a2-52af-4c1b-9db8-be3b7522774d-kube-api-access-qm5zl\") pod \"dnsmasq-dns-6cd9bffc9-84qbs\" (UID: \"a97892a2-52af-4c1b-9db8-be3b7522774d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.885904 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:05 crc kubenswrapper[4902]: I1202 14:40:05.998691 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:40:06 crc kubenswrapper[4902]: E1202 14:40:06.055173 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd81580a2_c346_4315_aee2_d6346b5c55d0.slice/crio-8293b8b3f78faad9ecd5a4971fc8dc5bf5ee73c1355930b0c0ecc3694341af48\": RecentStats: unable to find data in memory cache]" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.123347 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.123753 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.123810 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.123882 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.124022 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6txd\" (UniqueName: \"kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.124100 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb\") pod \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\" (UID: \"fb9632b1-922e-4ee3-a4e0-a3134cd34db4\") " Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.130761 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd" (OuterVolumeSpecName: "kube-api-access-p6txd") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "kube-api-access-p6txd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.205821 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.219515 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config" (OuterVolumeSpecName: "config") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.222767 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.226436 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.226524 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.226618 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.226679 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6txd\" (UniqueName: \"kubernetes.io/projected/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-kube-api-access-p6txd\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.226447 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.257370 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fb9632b1-922e-4ee3-a4e0-a3134cd34db4" (UID: "fb9632b1-922e-4ee3-a4e0-a3134cd34db4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.328977 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.329014 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fb9632b1-922e-4ee3-a4e0-a3134cd34db4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.345187 4902 generic.go:334] "Generic (PLEG): container finished" podID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerID="40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24" exitCode=0 Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.345238 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" event={"ID":"fb9632b1-922e-4ee3-a4e0-a3134cd34db4","Type":"ContainerDied","Data":"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24"} Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.345272 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" event={"ID":"fb9632b1-922e-4ee3-a4e0-a3134cd34db4","Type":"ContainerDied","Data":"81bc3c293e1e84d751702d82b11771e730275086f59e1176bd6378052cf08621"} Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.345295 4902 scope.go:117] "RemoveContainer" containerID="40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.345451 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6nlh8" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.372716 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-84qbs"] Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.373250 4902 scope.go:117] "RemoveContainer" containerID="057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70" Dec 02 14:40:06 crc kubenswrapper[4902]: W1202 14:40:06.379707 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda97892a2_52af_4c1b_9db8_be3b7522774d.slice/crio-8bf1b78b8a9579d277780564184a801ca5a55388fda3cf93d4ecc2622dd65a54 WatchSource:0}: Error finding container 8bf1b78b8a9579d277780564184a801ca5a55388fda3cf93d4ecc2622dd65a54: Status 404 returned error can't find the container with id 8bf1b78b8a9579d277780564184a801ca5a55388fda3cf93d4ecc2622dd65a54 Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.410342 4902 scope.go:117] "RemoveContainer" containerID="40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.410707 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:40:06 crc kubenswrapper[4902]: E1202 14:40:06.411703 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24\": container with ID starting with 40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24 not found: ID does not exist" containerID="40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.411741 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24"} err="failed to get container status \"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24\": rpc error: code = NotFound desc = could not find container \"40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24\": container with ID starting with 40bb52ca131207a02dba7c7c1c0d21d20cdf0f458b94d87b1638a436b87fcf24 not found: ID does not exist" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.411898 4902 scope.go:117] "RemoveContainer" containerID="057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.422428 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6nlh8"] Dec 02 14:40:06 crc kubenswrapper[4902]: E1202 14:40:06.428781 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70\": container with ID starting with 057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70 not found: ID does not exist" containerID="057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70" Dec 02 14:40:06 crc kubenswrapper[4902]: I1202 14:40:06.428832 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70"} err="failed to get container status \"057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70\": rpc error: code = NotFound desc = could not find container 
\"057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70\": container with ID starting with 057c0e91e4e185f33e2a90694a9f3b06a74d4747c05411f3f7a279da02badd70 not found: ID does not exist" Dec 02 14:40:07 crc kubenswrapper[4902]: I1202 14:40:07.143230 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" path="/var/lib/kubelet/pods/fb9632b1-922e-4ee3-a4e0-a3134cd34db4/volumes" Dec 02 14:40:07 crc kubenswrapper[4902]: I1202 14:40:07.356922 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" event={"ID":"a97892a2-52af-4c1b-9db8-be3b7522774d","Type":"ContainerStarted","Data":"a42706d470958b51fe5097ecd5b11630e1b551054432885db51b6edcdd274045"} Dec 02 14:40:07 crc kubenswrapper[4902]: I1202 14:40:07.356981 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" event={"ID":"a97892a2-52af-4c1b-9db8-be3b7522774d","Type":"ContainerStarted","Data":"8bf1b78b8a9579d277780564184a801ca5a55388fda3cf93d4ecc2622dd65a54"} Dec 02 14:40:08 crc kubenswrapper[4902]: I1202 14:40:08.367209 4902 generic.go:334] "Generic (PLEG): container finished" podID="a97892a2-52af-4c1b-9db8-be3b7522774d" containerID="a42706d470958b51fe5097ecd5b11630e1b551054432885db51b6edcdd274045" exitCode=0 Dec 02 14:40:08 crc kubenswrapper[4902]: I1202 14:40:08.367250 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" event={"ID":"a97892a2-52af-4c1b-9db8-be3b7522774d","Type":"ContainerDied","Data":"a42706d470958b51fe5097ecd5b11630e1b551054432885db51b6edcdd274045"} Dec 02 14:40:10 crc kubenswrapper[4902]: I1202 14:40:10.350680 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:10 crc kubenswrapper[4902]: I1202 14:40:10.352624 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:10 crc kubenswrapper[4902]: I1202 14:40:10.426833 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:11 crc kubenswrapper[4902]: I1202 14:40:11.412938 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" event={"ID":"a97892a2-52af-4c1b-9db8-be3b7522774d","Type":"ContainerStarted","Data":"8021cd419d239be7bc5a5beb959aae3fd8b4b269481239ef4470ac5f33b0e73b"} Dec 02 14:40:11 crc kubenswrapper[4902]: I1202 14:40:11.450390 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" podStartSLOduration=6.450370125 podStartE2EDuration="6.450370125s" podCreationTimestamp="2025-12-02 14:40:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:40:11.437822229 +0000 UTC m=+1442.629130958" watchObservedRunningTime="2025-12-02 14:40:11.450370125 +0000 UTC m=+1442.641678844" Dec 02 14:40:11 crc kubenswrapper[4902]: I1202 14:40:11.488097 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:11 crc kubenswrapper[4902]: I1202 14:40:11.545993 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:40:12 crc kubenswrapper[4902]: I1202 14:40:12.425934 4902 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:13 crc kubenswrapper[4902]: I1202 14:40:13.438190 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dm229" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="registry-server" containerID="cri-o://de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b" gracePeriod=2 Dec 02 14:40:13 crc kubenswrapper[4902]: I1202 14:40:13.952853 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.019594 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities\") pod \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.019860 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8pz5\" (UniqueName: \"kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5\") pod \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.020015 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content\") pod \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\" (UID: \"eeafdc9c-6dad-4084-a34d-497f7d8aa00f\") " Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.020786 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities" (OuterVolumeSpecName: "utilities") pod "eeafdc9c-6dad-4084-a34d-497f7d8aa00f" (UID: "eeafdc9c-6dad-4084-a34d-497f7d8aa00f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.030814 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5" (OuterVolumeSpecName: "kube-api-access-s8pz5") pod "eeafdc9c-6dad-4084-a34d-497f7d8aa00f" (UID: "eeafdc9c-6dad-4084-a34d-497f7d8aa00f"). InnerVolumeSpecName "kube-api-access-s8pz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.126592 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.126638 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8pz5\" (UniqueName: \"kubernetes.io/projected/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-kube-api-access-s8pz5\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.153214 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eeafdc9c-6dad-4084-a34d-497f7d8aa00f" (UID: "eeafdc9c-6dad-4084-a34d-497f7d8aa00f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.228587 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeafdc9c-6dad-4084-a34d-497f7d8aa00f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.456672 4902 generic.go:334] "Generic (PLEG): container finished" podID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerID="de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b" exitCode=0 Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.456740 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerDied","Data":"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b"} Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.456784 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm229" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.456816 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm229" event={"ID":"eeafdc9c-6dad-4084-a34d-497f7d8aa00f","Type":"ContainerDied","Data":"f3b2a95f271c3ce5887dfd5d7dc7bd4477a6bd7db2367836553b5a6c5504e323"} Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.456865 4902 scope.go:117] "RemoveContainer" containerID="de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.502081 4902 scope.go:117] "RemoveContainer" containerID="f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.520845 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.533439 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dm229"] Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.565764 4902 scope.go:117] "RemoveContainer" containerID="0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.617105 4902 scope.go:117] "RemoveContainer" containerID="de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b" Dec 02 14:40:14 crc kubenswrapper[4902]: E1202 14:40:14.617703 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b\": container with ID starting with de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b not found: ID does not exist" containerID="de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.617773 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b"} err="failed to get container status \"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b\": rpc error: code = NotFound desc = could not find container \"de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b\": container with ID starting with de73c7d4d99a477edc8b1f3455214bafdbbef37599038a76ccff12716c139d5b not found: ID does not exist" Dec 02 14:40:14 crc 
kubenswrapper[4902]: I1202 14:40:14.617832 4902 scope.go:117] "RemoveContainer" containerID="f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4" Dec 02 14:40:14 crc kubenswrapper[4902]: E1202 14:40:14.618395 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4\": container with ID starting with f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4 not found: ID does not exist" containerID="f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.618444 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4"} err="failed to get container status \"f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4\": rpc error: code = NotFound desc = could not find container \"f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4\": container with ID starting with f0b3ce9604b83aff4f08ed419e3391e7615b3f570fdcb250ed729ecd641d33f4 not found: ID does not exist" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.618482 4902 scope.go:117] "RemoveContainer" containerID="0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774" Dec 02 14:40:14 crc kubenswrapper[4902]: E1202 14:40:14.618871 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774\": container with ID starting with 0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774 not found: ID does not exist" containerID="0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774" Dec 02 14:40:14 crc kubenswrapper[4902]: I1202 14:40:14.618932 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774"} err="failed to get container status \"0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774\": rpc error: code = NotFound desc = could not find container \"0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774\": container with ID starting with 0bcbd9e8e24259eeb182e7679cded7b0e0c19d8debfcc36ce3ddd7e52b1a4774 not found: ID does not exist" Dec 02 14:40:15 crc kubenswrapper[4902]: I1202 14:40:15.133665 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" path="/var/lib/kubelet/pods/eeafdc9c-6dad-4084-a34d-497f7d8aa00f/volumes" Dec 02 14:40:15 crc kubenswrapper[4902]: I1202 14:40:15.888141 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cd9bffc9-84qbs" Dec 02 14:40:15 crc kubenswrapper[4902]: I1202 14:40:15.941225 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:40:15 crc kubenswrapper[4902]: I1202 14:40:15.941510 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="dnsmasq-dns" containerID="cri-o://3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4" gracePeriod=10 Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.443652 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.484655 4902 generic.go:334] "Generic (PLEG): container finished" podID="2413827d-80bc-4936-8793-50d647b24d11" containerID="3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4" exitCode=0 Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.485138 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" event={"ID":"2413827d-80bc-4936-8793-50d647b24d11","Type":"ContainerDied","Data":"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4"} Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.485222 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" event={"ID":"2413827d-80bc-4936-8793-50d647b24d11","Type":"ContainerDied","Data":"767477bd280d052827dab346a62929f25dfd7911299043a89844e0b2002906dc"} Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.485288 4902 scope.go:117] "RemoveContainer" containerID="3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.485522 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-trdt2" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.508734 4902 scope.go:117] "RemoveContainer" containerID="2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.527804 4902 scope.go:117] "RemoveContainer" containerID="3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4" Dec 02 14:40:16 crc kubenswrapper[4902]: E1202 14:40:16.528477 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4\": container with ID starting with 3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4 not found: ID does not exist" containerID="3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.528539 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4"} err="failed to get container status \"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4\": rpc error: code = NotFound desc = could not find container \"3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4\": container with ID starting with 3820f75a863b67ce12689ecd05bf56e2a8c5fc5b1ce0e24134eaf744218083e4 not found: ID does not exist" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.528717 4902 scope.go:117] "RemoveContainer" containerID="2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3" Dec 02 14:40:16 crc kubenswrapper[4902]: E1202 14:40:16.529150 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3\": container with ID starting with 2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3 not found: ID does not exist" containerID="2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.529174 4902 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3"} err="failed to get container status \"2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3\": rpc error: code = NotFound desc = could not find container \"2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3\": container with ID starting with 2df454ffb89503d10223b4b5788d31a67a543c6ba5b3558ed064384ab380ccd3 not found: ID does not exist" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.531426 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.531635 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.531756 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.531816 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr7fz\" (UniqueName: \"kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.531860 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.532060 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.532101 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc\") pod \"2413827d-80bc-4936-8793-50d647b24d11\" (UID: \"2413827d-80bc-4936-8793-50d647b24d11\") " Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.541402 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz" (OuterVolumeSpecName: "kube-api-access-cr7fz") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "kube-api-access-cr7fz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.601421 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.601870 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.603229 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.604445 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.605097 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config" (OuterVolumeSpecName: "config") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.608514 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2413827d-80bc-4936-8793-50d647b24d11" (UID: "2413827d-80bc-4936-8793-50d647b24d11"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635107 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635150 4902 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635163 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-config\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635217 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635231 4902 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635241 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr7fz\" (UniqueName: \"kubernetes.io/projected/2413827d-80bc-4936-8793-50d647b24d11-kube-api-access-cr7fz\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.635253 4902 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2413827d-80bc-4936-8793-50d647b24d11-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.872556 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:40:16 crc kubenswrapper[4902]: I1202 14:40:16.891274 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-trdt2"] Dec 02 14:40:17 crc kubenswrapper[4902]: I1202 14:40:17.142380 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2413827d-80bc-4936-8793-50d647b24d11" path="/var/lib/kubelet/pods/2413827d-80bc-4936-8793-50d647b24d11/volumes" Dec 02 14:40:28 crc kubenswrapper[4902]: I1202 14:40:28.665071 4902 generic.go:334] "Generic (PLEG): container finished" podID="6e03bb87-25fa-48ee-8e1a-20309d4d3a4b" containerID="8bd40e984d6a8b543fffbd9b583040b76e9b23ba51d7532821c88d0ea3999ad8" exitCode=0 Dec 02 14:40:28 crc kubenswrapper[4902]: I1202 14:40:28.665141 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b","Type":"ContainerDied","Data":"8bd40e984d6a8b543fffbd9b583040b76e9b23ba51d7532821c88d0ea3999ad8"} Dec 02 14:40:29 crc kubenswrapper[4902]: I1202 14:40:29.680076 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6e03bb87-25fa-48ee-8e1a-20309d4d3a4b","Type":"ContainerStarted","Data":"64bd8fd66810397c163ea8459cb72d22096c11d096ce153b2204668eed634fa1"} Dec 02 14:40:29 crc kubenswrapper[4902]: I1202 14:40:29.681158 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 
14:40:29 crc kubenswrapper[4902]: I1202 14:40:29.708706 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.708690245 podStartE2EDuration="36.708690245s" podCreationTimestamp="2025-12-02 14:39:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:40:29.704444655 +0000 UTC m=+1460.895753384" watchObservedRunningTime="2025-12-02 14:40:29.708690245 +0000 UTC m=+1460.899998944" Dec 02 14:40:30 crc kubenswrapper[4902]: I1202 14:40:30.696803 4902 generic.go:334] "Generic (PLEG): container finished" podID="1640e5ea-d80e-4302-9690-5ca9efaa9879" containerID="7aa51f2e6f516583a2e3fc459579f78e686c42e28614cf1244be7f384308d265" exitCode=0 Dec 02 14:40:30 crc kubenswrapper[4902]: I1202 14:40:30.696920 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1640e5ea-d80e-4302-9690-5ca9efaa9879","Type":"ContainerDied","Data":"7aa51f2e6f516583a2e3fc459579f78e686c42e28614cf1244be7f384308d265"} Dec 02 14:40:31 crc kubenswrapper[4902]: I1202 14:40:31.707022 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1640e5ea-d80e-4302-9690-5ca9efaa9879","Type":"ContainerStarted","Data":"77f44f1a7b21345db147d1ee83652dda9ca83ebcfebe74ae8c90fe43e48f86a0"} Dec 02 14:40:31 crc kubenswrapper[4902]: I1202 14:40:31.707684 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:40:31 crc kubenswrapper[4902]: I1202 14:40:31.736086 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.736069194 podStartE2EDuration="37.736069194s" podCreationTimestamp="2025-12-02 14:39:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 14:40:31.732287986 +0000 UTC m=+1462.923596695" watchObservedRunningTime="2025-12-02 14:40:31.736069194 +0000 UTC m=+1462.927377903" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.000829 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf"] Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001601 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="init" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001612 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="init" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001625 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="extract-utilities" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001631 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="extract-utilities" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001649 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="init" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001654 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="init" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 
14:40:34.001675 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001681 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001695 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="registry-server" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001701 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="registry-server" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001712 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="extract-content" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001718 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="extract-content" Dec 02 14:40:34 crc kubenswrapper[4902]: E1202 14:40:34.001739 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001744 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001919 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9632b1-922e-4ee3-a4e0-a3134cd34db4" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001937 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2413827d-80bc-4936-8793-50d647b24d11" containerName="dnsmasq-dns" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.001944 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeafdc9c-6dad-4084-a34d-497f7d8aa00f" containerName="registry-server" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.002594 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.005993 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.006042 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.006109 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.006151 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.016330 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf"] Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.122965 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.123285 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.123369 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.123447 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-597kp\" (UniqueName: \"kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.225776 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.225892 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.225941 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.225987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-597kp\" (UniqueName: \"kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.233944 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.235808 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.237463 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.244385 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-597kp\" (UniqueName: \"kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.320770 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.731365 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.732425 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:40:34 crc kubenswrapper[4902]: I1202 14:40:34.936519 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf"] Dec 02 14:40:35 crc kubenswrapper[4902]: I1202 14:40:35.757934 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" event={"ID":"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d","Type":"ContainerStarted","Data":"a31cf46635c21dda1f8649d1a98a4837c95788de6ab26364d4a5dd4aca9eac68"} Dec 02 14:40:43 crc kubenswrapper[4902]: I1202 14:40:43.847773 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 14:40:44 crc kubenswrapper[4902]: I1202 14:40:44.521722 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 14:40:45 crc kubenswrapper[4902]: I1202 14:40:45.859501 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" event={"ID":"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d","Type":"ContainerStarted","Data":"5921f89f12ce304657ea23c25351e513f2eb20f4c9f9741949985e155508ef6c"} Dec 02 14:40:45 crc kubenswrapper[4902]: I1202 14:40:45.880462 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" podStartSLOduration=2.518437929 podStartE2EDuration="12.880443723s" podCreationTimestamp="2025-12-02 14:40:33 +0000 UTC" firstStartedPulling="2025-12-02 14:40:34.932479413 +0000 UTC m=+1466.123788122" lastFinishedPulling="2025-12-02 14:40:45.294485207 +0000 UTC m=+1476.485793916" observedRunningTime="2025-12-02 14:40:45.87539196 +0000 UTC m=+1477.066700689" watchObservedRunningTime="2025-12-02 14:40:45.880443723 +0000 UTC m=+1477.071752452" Dec 02 14:40:57 crc kubenswrapper[4902]: E1202 14:40:57.536889 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb851cac6_d9e2_4a83_92d8_dbd09ee7e38d.slice/crio-5921f89f12ce304657ea23c25351e513f2eb20f4c9f9741949985e155508ef6c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb851cac6_d9e2_4a83_92d8_dbd09ee7e38d.slice/crio-conmon-5921f89f12ce304657ea23c25351e513f2eb20f4c9f9741949985e155508ef6c.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:40:58 crc kubenswrapper[4902]: I1202 14:40:58.003903 4902 generic.go:334] "Generic (PLEG): container finished" podID="b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" 
containerID="5921f89f12ce304657ea23c25351e513f2eb20f4c9f9741949985e155508ef6c" exitCode=0 Dec 02 14:40:58 crc kubenswrapper[4902]: I1202 14:40:58.003988 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" event={"ID":"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d","Type":"ContainerDied","Data":"5921f89f12ce304657ea23c25351e513f2eb20f4c9f9741949985e155508ef6c"} Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.515413 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.599779 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key\") pod \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.600042 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle\") pod \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.600097 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-597kp\" (UniqueName: \"kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp\") pod \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.600232 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory\") pod \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\" (UID: \"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d\") " Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.617875 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" (UID: "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.617933 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp" (OuterVolumeSpecName: "kube-api-access-597kp") pod "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" (UID: "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d"). InnerVolumeSpecName "kube-api-access-597kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.634105 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory" (OuterVolumeSpecName: "inventory") pod "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" (UID: "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.646360 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" (UID: "b851cac6-d9e2-4a83-92d8-dbd09ee7e38d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.704130 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.704196 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.704213 4902 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:40:59 crc kubenswrapper[4902]: I1202 14:40:59.704231 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-597kp\" (UniqueName: \"kubernetes.io/projected/b851cac6-d9e2-4a83-92d8-dbd09ee7e38d-kube-api-access-597kp\") on node \"crc\" DevicePath \"\"" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.031545 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" event={"ID":"b851cac6-d9e2-4a83-92d8-dbd09ee7e38d","Type":"ContainerDied","Data":"a31cf46635c21dda1f8649d1a98a4837c95788de6ab26364d4a5dd4aca9eac68"} Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.032285 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a31cf46635c21dda1f8649d1a98a4837c95788de6ab26364d4a5dd4aca9eac68" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.031673 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.137599 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc"] Dec 02 14:41:00 crc kubenswrapper[4902]: E1202 14:41:00.138210 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.138239 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.138558 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="b851cac6-d9e2-4a83-92d8-dbd09ee7e38d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.139496 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.141711 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.141868 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.142225 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.142724 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.163739 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc"] Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.213997 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csfgb\" (UniqueName: \"kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.214086 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.214156 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.321078 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csfgb\" (UniqueName: \"kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.321144 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.321178 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.324925 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.325434 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.339719 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csfgb\" (UniqueName: \"kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-v95mc\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:00 crc kubenswrapper[4902]: I1202 14:41:00.459762 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:01 crc kubenswrapper[4902]: I1202 14:41:01.078843 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc"] Dec 02 14:41:01 crc kubenswrapper[4902]: W1202 14:41:01.087551 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c0e3211_2564_459f_b072_c6d07ac1da5c.slice/crio-8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178 WatchSource:0}: Error finding container 8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178: Status 404 returned error can't find the container with id 8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178 Dec 02 14:41:02 crc kubenswrapper[4902]: I1202 14:41:02.052461 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" event={"ID":"4c0e3211-2564-459f-b072-c6d07ac1da5c","Type":"ContainerStarted","Data":"8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178"} Dec 02 14:41:03 crc kubenswrapper[4902]: I1202 14:41:03.068736 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" event={"ID":"4c0e3211-2564-459f-b072-c6d07ac1da5c","Type":"ContainerStarted","Data":"c355fc659a158a8a4270b0ba52706253d128aecb7aaf52eda8343fcad8e9aef0"} Dec 02 14:41:03 crc kubenswrapper[4902]: I1202 14:41:03.099305 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" podStartSLOduration=2.36384446 podStartE2EDuration="3.099282625s" podCreationTimestamp="2025-12-02 14:41:00 +0000 UTC" firstStartedPulling="2025-12-02 14:41:01.090839405 +0000 UTC m=+1492.282148114" lastFinishedPulling="2025-12-02 14:41:01.82627756 +0000 UTC m=+1493.017586279" observedRunningTime="2025-12-02 14:41:03.089316742 +0000 UTC m=+1494.280625471" watchObservedRunningTime="2025-12-02 14:41:03.099282625 +0000 UTC m=+1494.290591344" 
Dec 02 14:41:04 crc kubenswrapper[4902]: I1202 14:41:04.731965 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:41:04 crc kubenswrapper[4902]: I1202 14:41:04.732396 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:41:04 crc kubenswrapper[4902]: I1202 14:41:04.732455 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:41:04 crc kubenswrapper[4902]: I1202 14:41:04.733355 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:41:04 crc kubenswrapper[4902]: I1202 14:41:04.733428 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5" gracePeriod=600 Dec 02 14:41:05 crc kubenswrapper[4902]: I1202 14:41:05.098870 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5" exitCode=0 Dec 02 14:41:05 crc kubenswrapper[4902]: I1202 14:41:05.099008 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5"} Dec 02 14:41:05 crc kubenswrapper[4902]: I1202 14:41:05.099110 4902 scope.go:117] "RemoveContainer" containerID="35031c6ab93a7172db862ff9a8aff08493444d048e85d1cd7f7711172563a23f" Dec 02 14:41:05 crc kubenswrapper[4902]: I1202 14:41:05.102756 4902 generic.go:334] "Generic (PLEG): container finished" podID="4c0e3211-2564-459f-b072-c6d07ac1da5c" containerID="c355fc659a158a8a4270b0ba52706253d128aecb7aaf52eda8343fcad8e9aef0" exitCode=0 Dec 02 14:41:05 crc kubenswrapper[4902]: I1202 14:41:05.102806 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" event={"ID":"4c0e3211-2564-459f-b072-c6d07ac1da5c","Type":"ContainerDied","Data":"c355fc659a158a8a4270b0ba52706253d128aecb7aaf52eda8343fcad8e9aef0"} Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.117719 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f"} Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.664136 4902 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.846337 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory\") pod \"4c0e3211-2564-459f-b072-c6d07ac1da5c\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.846488 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key\") pod \"4c0e3211-2564-459f-b072-c6d07ac1da5c\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.846632 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csfgb\" (UniqueName: \"kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb\") pod \"4c0e3211-2564-459f-b072-c6d07ac1da5c\" (UID: \"4c0e3211-2564-459f-b072-c6d07ac1da5c\") " Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.851991 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb" (OuterVolumeSpecName: "kube-api-access-csfgb") pod "4c0e3211-2564-459f-b072-c6d07ac1da5c" (UID: "4c0e3211-2564-459f-b072-c6d07ac1da5c"). InnerVolumeSpecName "kube-api-access-csfgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.887277 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory" (OuterVolumeSpecName: "inventory") pod "4c0e3211-2564-459f-b072-c6d07ac1da5c" (UID: "4c0e3211-2564-459f-b072-c6d07ac1da5c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.894372 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4c0e3211-2564-459f-b072-c6d07ac1da5c" (UID: "4c0e3211-2564-459f-b072-c6d07ac1da5c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.949550 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.949610 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c0e3211-2564-459f-b072-c6d07ac1da5c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:41:06 crc kubenswrapper[4902]: I1202 14:41:06.949621 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csfgb\" (UniqueName: \"kubernetes.io/projected/4c0e3211-2564-459f-b072-c6d07ac1da5c-kube-api-access-csfgb\") on node \"crc\" DevicePath \"\"" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.128426 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" event={"ID":"4c0e3211-2564-459f-b072-c6d07ac1da5c","Type":"ContainerDied","Data":"8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178"} Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.128477 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a120d1bb70205200e40e49b7bf4e4e530d29711fe4ca17a8a8fc8329b1f8178" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.128445 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-v95mc" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.257159 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b"] Dec 02 14:41:07 crc kubenswrapper[4902]: E1202 14:41:07.257986 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0e3211-2564-459f-b072-c6d07ac1da5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.258010 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0e3211-2564-459f-b072-c6d07ac1da5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.258278 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c0e3211-2564-459f-b072-c6d07ac1da5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.259137 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.261917 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.262039 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.262127 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.262300 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.280992 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b"] Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.458665 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.458735 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.459436 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlxq9\" (UniqueName: \"kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.459638 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.561284 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlxq9\" (UniqueName: \"kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.561340 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.561432 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.561477 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.569080 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.569797 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.572351 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.599139 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlxq9\" (UniqueName: \"kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:07 crc kubenswrapper[4902]: I1202 14:41:07.897848 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:41:08 crc kubenswrapper[4902]: I1202 14:41:08.439842 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b"] Dec 02 14:41:09 crc kubenswrapper[4902]: I1202 14:41:09.126757 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:41:09 crc kubenswrapper[4902]: I1202 14:41:09.165533 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" event={"ID":"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02","Type":"ContainerStarted","Data":"ab799b4d3fc3d11cbb1db5c77b00b947298130b113a4c1d9b0094d24e043270b"} Dec 02 14:41:10 crc kubenswrapper[4902]: I1202 14:41:10.191091 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" event={"ID":"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02","Type":"ContainerStarted","Data":"697b7f36880d0e715b893fe4e10fac7c1bc305107cd6362ed426edac32623e82"} Dec 02 14:41:10 crc kubenswrapper[4902]: I1202 14:41:10.217447 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" podStartSLOduration=2.5327506680000003 podStartE2EDuration="3.217432501s" podCreationTimestamp="2025-12-02 14:41:07 +0000 UTC" firstStartedPulling="2025-12-02 14:41:08.436291052 +0000 UTC m=+1499.627599761" lastFinishedPulling="2025-12-02 14:41:09.120972885 +0000 UTC m=+1500.312281594" observedRunningTime="2025-12-02 14:41:10.214160208 +0000 UTC m=+1501.405468907" watchObservedRunningTime="2025-12-02 14:41:10.217432501 +0000 UTC m=+1501.408741210" Dec 02 14:41:28 crc kubenswrapper[4902]: I1202 14:41:28.997630 4902 scope.go:117] "RemoveContainer" containerID="ee207fc82dd5cd23b5d3f234d7a1114e6bfdb2d448eaadb7f8f6b4158b2f6e12" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.654136 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.658875 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.672244 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.720021 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htp5m\" (UniqueName: \"kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.720147 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.720197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.821931 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.822010 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.822062 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htp5m\" (UniqueName: \"kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.822811 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.823018 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.856795 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-htp5m\" (UniqueName: \"kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m\") pod \"redhat-marketplace-j4x8j\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:20 crc kubenswrapper[4902]: I1202 14:42:20.999237 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:21 crc kubenswrapper[4902]: I1202 14:42:21.495827 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:22 crc kubenswrapper[4902]: I1202 14:42:22.176106 4902 generic.go:334] "Generic (PLEG): container finished" podID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerID="b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323" exitCode=0 Dec 02 14:42:22 crc kubenswrapper[4902]: I1202 14:42:22.176150 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerDied","Data":"b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323"} Dec 02 14:42:22 crc kubenswrapper[4902]: I1202 14:42:22.176178 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerStarted","Data":"bba74a499083656e8f0dcd7e2c6bbe55c1e03a2014d3d014c625a94ea09c0853"} Dec 02 14:42:24 crc kubenswrapper[4902]: I1202 14:42:24.214484 4902 generic.go:334] "Generic (PLEG): container finished" podID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerID="67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27" exitCode=0 Dec 02 14:42:24 crc kubenswrapper[4902]: I1202 14:42:24.214639 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerDied","Data":"67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27"} Dec 02 14:42:25 crc kubenswrapper[4902]: I1202 14:42:25.235897 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerStarted","Data":"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7"} Dec 02 14:42:25 crc kubenswrapper[4902]: I1202 14:42:25.266165 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j4x8j" podStartSLOduration=2.65261572 podStartE2EDuration="5.26614128s" podCreationTimestamp="2025-12-02 14:42:20 +0000 UTC" firstStartedPulling="2025-12-02 14:42:22.178680951 +0000 UTC m=+1573.369989670" lastFinishedPulling="2025-12-02 14:42:24.792206521 +0000 UTC m=+1575.983515230" observedRunningTime="2025-12-02 14:42:25.261182509 +0000 UTC m=+1576.452491248" watchObservedRunningTime="2025-12-02 14:42:25.26614128 +0000 UTC m=+1576.457450029" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.056406 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.059032 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.060400 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.161995 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.162051 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c864\" (UniqueName: \"kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.162112 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.263869 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.263916 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c864\" (UniqueName: \"kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.263980 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.264432 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.264509 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities\") pod \"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.288661 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c864\" (UniqueName: \"kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864\") pod 
\"certified-operators-s6rfk\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.407331 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:27 crc kubenswrapper[4902]: I1202 14:42:27.917715 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:27 crc kubenswrapper[4902]: W1202 14:42:27.920915 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68b98230_c14a_40bc_97f5_09176caa7846.slice/crio-95a6a8cc8d47645c0bf6c3a21b53f754214223da7669377bbc06edfa7616fab0 WatchSource:0}: Error finding container 95a6a8cc8d47645c0bf6c3a21b53f754214223da7669377bbc06edfa7616fab0: Status 404 returned error can't find the container with id 95a6a8cc8d47645c0bf6c3a21b53f754214223da7669377bbc06edfa7616fab0 Dec 02 14:42:28 crc kubenswrapper[4902]: I1202 14:42:28.265592 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerStarted","Data":"95a6a8cc8d47645c0bf6c3a21b53f754214223da7669377bbc06edfa7616fab0"} Dec 02 14:42:29 crc kubenswrapper[4902]: I1202 14:42:29.095588 4902 scope.go:117] "RemoveContainer" containerID="8b6b40627f0d46cc07acf520db3c4d594a7a2b05ef5e8024f4d7fdd316861455" Dec 02 14:42:29 crc kubenswrapper[4902]: I1202 14:42:29.280919 4902 generic.go:334] "Generic (PLEG): container finished" podID="68b98230-c14a-40bc-97f5-09176caa7846" containerID="e3894ed3c2ab690324f1c44a3027c5ac692d8416a36a16555e8ec491dd0feecc" exitCode=0 Dec 02 14:42:29 crc kubenswrapper[4902]: I1202 14:42:29.281000 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerDied","Data":"e3894ed3c2ab690324f1c44a3027c5ac692d8416a36a16555e8ec491dd0feecc"} Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.000344 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.001095 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.052791 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.303144 4902 generic.go:334] "Generic (PLEG): container finished" podID="68b98230-c14a-40bc-97f5-09176caa7846" containerID="112a57f249e48cc7043b0007fe1dba5c7b04ccd5db841aadcb987bef827585de" exitCode=0 Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.303223 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerDied","Data":"112a57f249e48cc7043b0007fe1dba5c7b04ccd5db841aadcb987bef827585de"} Dec 02 14:42:31 crc kubenswrapper[4902]: I1202 14:42:31.358874 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:32 crc kubenswrapper[4902]: I1202 14:42:32.326954 4902 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerStarted","Data":"5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da"} Dec 02 14:42:32 crc kubenswrapper[4902]: I1202 14:42:32.355215 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s6rfk" podStartSLOduration=3.885592961 podStartE2EDuration="6.355194192s" podCreationTimestamp="2025-12-02 14:42:26 +0000 UTC" firstStartedPulling="2025-12-02 14:42:29.28275484 +0000 UTC m=+1580.474063589" lastFinishedPulling="2025-12-02 14:42:31.752356111 +0000 UTC m=+1582.943664820" observedRunningTime="2025-12-02 14:42:32.348299786 +0000 UTC m=+1583.539608535" watchObservedRunningTime="2025-12-02 14:42:32.355194192 +0000 UTC m=+1583.546502911" Dec 02 14:42:33 crc kubenswrapper[4902]: I1202 14:42:33.008074 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.351270 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j4x8j" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="registry-server" containerID="cri-o://7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7" gracePeriod=2 Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.856795 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.946482 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities\") pod \"201b5928-82e8-4585-8e0a-aa737dfadaf2\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.946820 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content\") pod \"201b5928-82e8-4585-8e0a-aa737dfadaf2\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.947051 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htp5m\" (UniqueName: \"kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m\") pod \"201b5928-82e8-4585-8e0a-aa737dfadaf2\" (UID: \"201b5928-82e8-4585-8e0a-aa737dfadaf2\") " Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.948803 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities" (OuterVolumeSpecName: "utilities") pod "201b5928-82e8-4585-8e0a-aa737dfadaf2" (UID: "201b5928-82e8-4585-8e0a-aa737dfadaf2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.957670 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m" (OuterVolumeSpecName: "kube-api-access-htp5m") pod "201b5928-82e8-4585-8e0a-aa737dfadaf2" (UID: "201b5928-82e8-4585-8e0a-aa737dfadaf2"). InnerVolumeSpecName "kube-api-access-htp5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:42:34 crc kubenswrapper[4902]: I1202 14:42:34.966912 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "201b5928-82e8-4585-8e0a-aa737dfadaf2" (UID: "201b5928-82e8-4585-8e0a-aa737dfadaf2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.048677 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htp5m\" (UniqueName: \"kubernetes.io/projected/201b5928-82e8-4585-8e0a-aa737dfadaf2-kube-api-access-htp5m\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.048712 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.048723 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201b5928-82e8-4585-8e0a-aa737dfadaf2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.367592 4902 generic.go:334] "Generic (PLEG): container finished" podID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerID="7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7" exitCode=0 Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.367681 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerDied","Data":"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7"} Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.367734 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4x8j" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.367764 4902 scope.go:117] "RemoveContainer" containerID="7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.367748 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4x8j" event={"ID":"201b5928-82e8-4585-8e0a-aa737dfadaf2","Type":"ContainerDied","Data":"bba74a499083656e8f0dcd7e2c6bbe55c1e03a2014d3d014c625a94ea09c0853"} Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.419968 4902 scope.go:117] "RemoveContainer" containerID="67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.427792 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.447213 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4x8j"] Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.453627 4902 scope.go:117] "RemoveContainer" containerID="b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.511509 4902 scope.go:117] "RemoveContainer" containerID="7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7" Dec 02 14:42:35 crc kubenswrapper[4902]: E1202 14:42:35.512110 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7\": container with ID starting with 7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7 not found: ID does not exist" containerID="7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.512229 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7"} err="failed to get container status \"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7\": rpc error: code = NotFound desc = could not find container \"7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7\": container with ID starting with 7c27716c219984fa6a2f37e10f27dc2f9ada3941c5c855603599cd18324669b7 not found: ID does not exist" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.512304 4902 scope.go:117] "RemoveContainer" containerID="67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27" Dec 02 14:42:35 crc kubenswrapper[4902]: E1202 14:42:35.512935 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27\": container with ID starting with 67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27 not found: ID does not exist" containerID="67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.512996 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27"} err="failed to get container status \"67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27\": rpc error: code = NotFound desc = could not find 
container \"67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27\": container with ID starting with 67e564857b4a0d3b59bd6058af557a3e1d10762712c0010162d2691d7f9bab27 not found: ID does not exist" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.513039 4902 scope.go:117] "RemoveContainer" containerID="b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323" Dec 02 14:42:35 crc kubenswrapper[4902]: E1202 14:42:35.515068 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323\": container with ID starting with b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323 not found: ID does not exist" containerID="b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323" Dec 02 14:42:35 crc kubenswrapper[4902]: I1202 14:42:35.515124 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323"} err="failed to get container status \"b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323\": rpc error: code = NotFound desc = could not find container \"b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323\": container with ID starting with b46f3c31dd76af6b68d58a835e2512f85cb4424e754669aa30fa5a8798c5a323 not found: ID does not exist" Dec 02 14:42:37 crc kubenswrapper[4902]: I1202 14:42:37.119231 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" path="/var/lib/kubelet/pods/201b5928-82e8-4585-8e0a-aa737dfadaf2/volumes" Dec 02 14:42:37 crc kubenswrapper[4902]: I1202 14:42:37.408212 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:37 crc kubenswrapper[4902]: I1202 14:42:37.408393 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:37 crc kubenswrapper[4902]: I1202 14:42:37.484489 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:38 crc kubenswrapper[4902]: I1202 14:42:38.490369 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:39 crc kubenswrapper[4902]: I1202 14:42:39.407298 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:40 crc kubenswrapper[4902]: I1202 14:42:40.425517 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s6rfk" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="registry-server" containerID="cri-o://5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da" gracePeriod=2 Dec 02 14:42:40 crc kubenswrapper[4902]: E1202 14:42:40.516884 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68b98230_c14a_40bc_97f5_09176caa7846.slice/crio-5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da.scope\": RecentStats: unable to find data in memory cache]" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.443005 4902 generic.go:334] "Generic (PLEG): container finished" 
podID="68b98230-c14a-40bc-97f5-09176caa7846" containerID="5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da" exitCode=0 Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.443136 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerDied","Data":"5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da"} Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.779660 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.785484 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content\") pod \"68b98230-c14a-40bc-97f5-09176caa7846\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.785751 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c864\" (UniqueName: \"kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864\") pod \"68b98230-c14a-40bc-97f5-09176caa7846\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.785887 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities\") pod \"68b98230-c14a-40bc-97f5-09176caa7846\" (UID: \"68b98230-c14a-40bc-97f5-09176caa7846\") " Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.789063 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities" (OuterVolumeSpecName: "utilities") pod "68b98230-c14a-40bc-97f5-09176caa7846" (UID: "68b98230-c14a-40bc-97f5-09176caa7846"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.791461 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864" (OuterVolumeSpecName: "kube-api-access-4c864") pod "68b98230-c14a-40bc-97f5-09176caa7846" (UID: "68b98230-c14a-40bc-97f5-09176caa7846"). InnerVolumeSpecName "kube-api-access-4c864". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.879763 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68b98230-c14a-40bc-97f5-09176caa7846" (UID: "68b98230-c14a-40bc-97f5-09176caa7846"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.888359 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.888419 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68b98230-c14a-40bc-97f5-09176caa7846-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:41 crc kubenswrapper[4902]: I1202 14:42:41.888435 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c864\" (UniqueName: \"kubernetes.io/projected/68b98230-c14a-40bc-97f5-09176caa7846-kube-api-access-4c864\") on node \"crc\" DevicePath \"\"" Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.463493 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6rfk" event={"ID":"68b98230-c14a-40bc-97f5-09176caa7846","Type":"ContainerDied","Data":"95a6a8cc8d47645c0bf6c3a21b53f754214223da7669377bbc06edfa7616fab0"} Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.463542 4902 scope.go:117] "RemoveContainer" containerID="5343bd24642bd74c9ed33519c2b8c23be30b30af1c53215bfa94e49edd5ef5da" Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.463606 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6rfk" Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.515447 4902 scope.go:117] "RemoveContainer" containerID="112a57f249e48cc7043b0007fe1dba5c7b04ccd5db841aadcb987bef827585de" Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.531238 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.542902 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s6rfk"] Dec 02 14:42:42 crc kubenswrapper[4902]: I1202 14:42:42.573066 4902 scope.go:117] "RemoveContainer" containerID="e3894ed3c2ab690324f1c44a3027c5ac692d8416a36a16555e8ec491dd0feecc" Dec 02 14:42:43 crc kubenswrapper[4902]: I1202 14:42:43.119853 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68b98230-c14a-40bc-97f5-09176caa7846" path="/var/lib/kubelet/pods/68b98230-c14a-40bc-97f5-09176caa7846/volumes" Dec 02 14:43:34 crc kubenswrapper[4902]: I1202 14:43:34.732232 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:43:34 crc kubenswrapper[4902]: I1202 14:43:34.732992 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.457138 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.458286 4902 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="extract-utilities" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.458463 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="extract-utilities" Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.458633 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.458801 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.458954 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="extract-utilities" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.459099 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="extract-utilities" Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.459255 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="extract-content" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.459402 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="extract-content" Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.459528 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.459682 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: E1202 14:43:35.459822 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="extract-content" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.459953 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="extract-content" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.460430 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="201b5928-82e8-4585-8e0a-aa737dfadaf2" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.460787 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="68b98230-c14a-40bc-97f5-09176caa7846" containerName="registry-server" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.463609 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.472630 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.514711 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.514872 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbtq\" (UniqueName: \"kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.514925 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.616321 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbtq\" (UniqueName: \"kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.616906 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.617082 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.617285 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.617755 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.642687 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7gbtq\" (UniqueName: \"kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq\") pod \"community-operators-t8hnc\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:35 crc kubenswrapper[4902]: I1202 14:43:35.826929 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:36 crc kubenswrapper[4902]: I1202 14:43:36.345617 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:37 crc kubenswrapper[4902]: I1202 14:43:37.105179 4902 generic.go:334] "Generic (PLEG): container finished" podID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerID="a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416" exitCode=0 Dec 02 14:43:37 crc kubenswrapper[4902]: I1202 14:43:37.105283 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerDied","Data":"a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416"} Dec 02 14:43:37 crc kubenswrapper[4902]: I1202 14:43:37.105708 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerStarted","Data":"8ae2ab10fc3a72f8211e32113a000f6def941bd95c3415406daa8a0ae367dcde"} Dec 02 14:43:37 crc kubenswrapper[4902]: I1202 14:43:37.111431 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:43:39 crc kubenswrapper[4902]: I1202 14:43:39.127510 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerStarted","Data":"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f"} Dec 02 14:43:40 crc kubenswrapper[4902]: I1202 14:43:40.142285 4902 generic.go:334] "Generic (PLEG): container finished" podID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerID="e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f" exitCode=0 Dec 02 14:43:40 crc kubenswrapper[4902]: I1202 14:43:40.142341 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerDied","Data":"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f"} Dec 02 14:43:41 crc kubenswrapper[4902]: I1202 14:43:41.154530 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerStarted","Data":"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc"} Dec 02 14:43:41 crc kubenswrapper[4902]: I1202 14:43:41.191225 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t8hnc" podStartSLOduration=2.659256742 podStartE2EDuration="6.191205456s" podCreationTimestamp="2025-12-02 14:43:35 +0000 UTC" firstStartedPulling="2025-12-02 14:43:37.111121704 +0000 UTC m=+1648.302430423" lastFinishedPulling="2025-12-02 14:43:40.643070388 +0000 UTC m=+1651.834379137" observedRunningTime="2025-12-02 14:43:41.182692574 +0000 UTC m=+1652.374001443" watchObservedRunningTime="2025-12-02 
14:43:41.191205456 +0000 UTC m=+1652.382514175" Dec 02 14:43:45 crc kubenswrapper[4902]: I1202 14:43:45.828052 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:45 crc kubenswrapper[4902]: I1202 14:43:45.828939 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:45 crc kubenswrapper[4902]: I1202 14:43:45.921162 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:46 crc kubenswrapper[4902]: I1202 14:43:46.301793 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:46 crc kubenswrapper[4902]: I1202 14:43:46.360682 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.230757 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t8hnc" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="registry-server" containerID="cri-o://8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc" gracePeriod=2 Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.828951 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.946693 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gbtq\" (UniqueName: \"kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq\") pod \"dcd9e7e0-afab-4a92-a992-49731b581b0f\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.947120 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities\") pod \"dcd9e7e0-afab-4a92-a992-49731b581b0f\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.947286 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content\") pod \"dcd9e7e0-afab-4a92-a992-49731b581b0f\" (UID: \"dcd9e7e0-afab-4a92-a992-49731b581b0f\") " Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.947919 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities" (OuterVolumeSpecName: "utilities") pod "dcd9e7e0-afab-4a92-a992-49731b581b0f" (UID: "dcd9e7e0-afab-4a92-a992-49731b581b0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:43:48 crc kubenswrapper[4902]: I1202 14:43:48.959016 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq" (OuterVolumeSpecName: "kube-api-access-7gbtq") pod "dcd9e7e0-afab-4a92-a992-49731b581b0f" (UID: "dcd9e7e0-afab-4a92-a992-49731b581b0f"). InnerVolumeSpecName "kube-api-access-7gbtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.008525 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dcd9e7e0-afab-4a92-a992-49731b581b0f" (UID: "dcd9e7e0-afab-4a92-a992-49731b581b0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.050081 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.050131 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd9e7e0-afab-4a92-a992-49731b581b0f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.050148 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gbtq\" (UniqueName: \"kubernetes.io/projected/dcd9e7e0-afab-4a92-a992-49731b581b0f-kube-api-access-7gbtq\") on node \"crc\" DevicePath \"\"" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.265713 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerDied","Data":"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc"} Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.265812 4902 scope.go:117] "RemoveContainer" containerID="8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.265946 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t8hnc" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.265689 4902 generic.go:334] "Generic (PLEG): container finished" podID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerID="8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc" exitCode=0 Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.266089 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t8hnc" event={"ID":"dcd9e7e0-afab-4a92-a992-49731b581b0f","Type":"ContainerDied","Data":"8ae2ab10fc3a72f8211e32113a000f6def941bd95c3415406daa8a0ae367dcde"} Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.299667 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.305752 4902 scope.go:117] "RemoveContainer" containerID="e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.313330 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t8hnc"] Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.334178 4902 scope.go:117] "RemoveContainer" containerID="a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.393618 4902 scope.go:117] "RemoveContainer" containerID="8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc" Dec 02 14:43:49 crc kubenswrapper[4902]: E1202 14:43:49.394170 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc\": container with ID starting with 8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc not found: ID does not exist" containerID="8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.394220 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc"} err="failed to get container status \"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc\": rpc error: code = NotFound desc = could not find container \"8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc\": container with ID starting with 8c3c8a2d3e640b566f11dcda18d8c6350992b380ade466032307edf389da6dcc not found: ID does not exist" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.394267 4902 scope.go:117] "RemoveContainer" containerID="e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f" Dec 02 14:43:49 crc kubenswrapper[4902]: E1202 14:43:49.394738 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f\": container with ID starting with e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f not found: ID does not exist" containerID="e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.394825 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f"} err="failed to get container status 
\"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f\": rpc error: code = NotFound desc = could not find container \"e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f\": container with ID starting with e01c111606628308c22798b1d6e2a028a87e77e1a8f335e07468c68ff152d28f not found: ID does not exist" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.394854 4902 scope.go:117] "RemoveContainer" containerID="a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416" Dec 02 14:43:49 crc kubenswrapper[4902]: E1202 14:43:49.395227 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416\": container with ID starting with a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416 not found: ID does not exist" containerID="a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416" Dec 02 14:43:49 crc kubenswrapper[4902]: I1202 14:43:49.395257 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416"} err="failed to get container status \"a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416\": rpc error: code = NotFound desc = could not find container \"a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416\": container with ID starting with a970dac53bb36ea9dc4afc021455fd552bf84cdb67a0676d712451fa1779b416 not found: ID does not exist" Dec 02 14:43:51 crc kubenswrapper[4902]: I1202 14:43:51.118586 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" path="/var/lib/kubelet/pods/dcd9e7e0-afab-4a92-a992-49731b581b0f/volumes" Dec 02 14:44:04 crc kubenswrapper[4902]: I1202 14:44:04.731483 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:44:04 crc kubenswrapper[4902]: I1202 14:44:04.732368 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.731431 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.732064 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.732112 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:44:34 crc 
kubenswrapper[4902]: I1202 14:44:34.732853 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.732918 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" gracePeriod=600 Dec 02 14:44:34 crc kubenswrapper[4902]: E1202 14:44:34.863365 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.873145 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" exitCode=0 Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.873195 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f"} Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.873250 4902 scope.go:117] "RemoveContainer" containerID="2cb387f91a89f081e93103a087cb756dd66d0727efc64f860756ae59c75099f5" Dec 02 14:44:34 crc kubenswrapper[4902]: I1202 14:44:34.874031 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:44:34 crc kubenswrapper[4902]: E1202 14:44:34.874313 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:44:38 crc kubenswrapper[4902]: I1202 14:44:38.926182 4902 generic.go:334] "Generic (PLEG): container finished" podID="0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" containerID="697b7f36880d0e715b893fe4e10fac7c1bc305107cd6362ed426edac32623e82" exitCode=0 Dec 02 14:44:38 crc kubenswrapper[4902]: I1202 14:44:38.926301 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" event={"ID":"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02","Type":"ContainerDied","Data":"697b7f36880d0e715b893fe4e10fac7c1bc305107cd6362ed426edac32623e82"} Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.498914 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.557495 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key\") pod \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.557594 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory\") pod \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.557700 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle\") pod \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.557780 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlxq9\" (UniqueName: \"kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9\") pod \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\" (UID: \"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02\") " Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.571742 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" (UID: "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.595148 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9" (OuterVolumeSpecName: "kube-api-access-tlxq9") pod "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" (UID: "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02"). InnerVolumeSpecName "kube-api-access-tlxq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.637250 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory" (OuterVolumeSpecName: "inventory") pod "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" (UID: "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.643861 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" (UID: "0d54fbdd-0a68-449d-a2ef-1a4de4c25b02"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.660093 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.660136 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.660150 4902 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.660164 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlxq9\" (UniqueName: \"kubernetes.io/projected/0d54fbdd-0a68-449d-a2ef-1a4de4c25b02-kube-api-access-tlxq9\") on node \"crc\" DevicePath \"\"" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.954682 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" event={"ID":"0d54fbdd-0a68-449d-a2ef-1a4de4c25b02","Type":"ContainerDied","Data":"ab799b4d3fc3d11cbb1db5c77b00b947298130b113a4c1d9b0094d24e043270b"} Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.954724 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab799b4d3fc3d11cbb1db5c77b00b947298130b113a4c1d9b0094d24e043270b" Dec 02 14:44:40 crc kubenswrapper[4902]: I1202 14:44:40.954794 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.048477 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp"] Dec 02 14:44:41 crc kubenswrapper[4902]: E1202 14:44:41.048931 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="extract-utilities" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.048957 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="extract-utilities" Dec 02 14:44:41 crc kubenswrapper[4902]: E1202 14:44:41.048992 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="registry-server" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.049004 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="registry-server" Dec 02 14:44:41 crc kubenswrapper[4902]: E1202 14:44:41.049021 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.049030 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 14:44:41 crc kubenswrapper[4902]: E1202 14:44:41.049062 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="extract-content" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.049070 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="extract-content" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.049305 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcd9e7e0-afab-4a92-a992-49731b581b0f" containerName="registry-server" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.049335 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d54fbdd-0a68-449d-a2ef-1a4de4c25b02" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.050306 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.055285 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.055373 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.055717 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.055956 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.063398 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp"] Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.176652 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts4d7\" (UniqueName: \"kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.176744 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.176816 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.278745 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts4d7\" (UniqueName: \"kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.278889 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.278991 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.282800 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.283724 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.298872 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts4d7\" (UniqueName: \"kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.390643 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.861847 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp"] Dec 02 14:44:41 crc kubenswrapper[4902]: W1202 14:44:41.875817 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod359a7d26_5917_4e98_be9e_55f4702c2ac7.slice/crio-d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa WatchSource:0}: Error finding container d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa: Status 404 returned error can't find the container with id d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa Dec 02 14:44:41 crc kubenswrapper[4902]: I1202 14:44:41.967641 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" event={"ID":"359a7d26-5917-4e98-be9e-55f4702c2ac7","Type":"ContainerStarted","Data":"d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa"} Dec 02 14:44:42 crc kubenswrapper[4902]: I1202 14:44:42.994540 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" event={"ID":"359a7d26-5917-4e98-be9e-55f4702c2ac7","Type":"ContainerStarted","Data":"9490ea99015bcb2119f598fd0bd08267ce221259742e08b6e1324610598f8f8f"} Dec 02 14:44:43 crc kubenswrapper[4902]: I1202 14:44:43.027400 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" podStartSLOduration=1.4919896449999999 podStartE2EDuration="2.027379192s" podCreationTimestamp="2025-12-02 14:44:41 +0000 UTC" firstStartedPulling="2025-12-02 14:44:41.87987655 +0000 UTC m=+1713.071185269" 
lastFinishedPulling="2025-12-02 14:44:42.415266097 +0000 UTC m=+1713.606574816" observedRunningTime="2025-12-02 14:44:43.019190009 +0000 UTC m=+1714.210498738" watchObservedRunningTime="2025-12-02 14:44:43.027379192 +0000 UTC m=+1714.218687911" Dec 02 14:44:49 crc kubenswrapper[4902]: I1202 14:44:49.122712 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:44:49 crc kubenswrapper[4902]: E1202 14:44:49.123613 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:44:50 crc kubenswrapper[4902]: I1202 14:44:50.092971 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-kwmmj"] Dec 02 14:44:50 crc kubenswrapper[4902]: I1202 14:44:50.107709 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-kwmmj"] Dec 02 14:44:50 crc kubenswrapper[4902]: I1202 14:44:50.121985 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-832f-account-create-update-8ns89"] Dec 02 14:44:50 crc kubenswrapper[4902]: I1202 14:44:50.135936 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-832f-account-create-update-8ns89"] Dec 02 14:44:51 crc kubenswrapper[4902]: I1202 14:44:51.117473 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df2b5da-f651-420f-b497-67bfab58faad" path="/var/lib/kubelet/pods/1df2b5da-f651-420f-b497-67bfab58faad/volumes" Dec 02 14:44:51 crc kubenswrapper[4902]: I1202 14:44:51.118687 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20fead8d-5003-473f-b71d-096c2877181d" path="/var/lib/kubelet/pods/20fead8d-5003-473f-b71d-096c2877181d/volumes" Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.055603 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-3087-account-create-update-857qc"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.070765 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-fxwsg"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.080270 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f70d-account-create-update-8qqq4"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.087397 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-3087-account-create-update-857qc"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.096109 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5e37-account-create-update-hqmwb"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.104606 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f70d-account-create-update-8qqq4"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.113753 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-fxwsg"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.122026 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-x6s4c"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.130894 4902 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/keystone-db-create-5mrvt"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.140036 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5e37-account-create-update-hqmwb"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.148036 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-x6s4c"] Dec 02 14:44:58 crc kubenswrapper[4902]: I1202 14:44:58.157179 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5mrvt"] Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.128751 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28512196-23fd-4d08-838b-fa37482cb529" path="/var/lib/kubelet/pods/28512196-23fd-4d08-838b-fa37482cb529/volumes" Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.129692 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54991fb3-bb13-413e-a8f1-bf85d25ed2dd" path="/var/lib/kubelet/pods/54991fb3-bb13-413e-a8f1-bf85d25ed2dd/volumes" Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.132490 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="778f3550-079c-40ca-bb05-8e2652592b93" path="/var/lib/kubelet/pods/778f3550-079c-40ca-bb05-8e2652592b93/volumes" Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.134643 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8" path="/var/lib/kubelet/pods/7b9ad5fa-afba-4ca0-aeba-ccaf8fc9d6a8/volumes" Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.136523 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9048046c-7133-4c81-b7ae-a37062e07f2d" path="/var/lib/kubelet/pods/9048046c-7133-4c81-b7ae-a37062e07f2d/volumes" Dec 02 14:44:59 crc kubenswrapper[4902]: I1202 14:44:59.137841 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c79841e5-8f30-49ec-ac9f-3be1fc08db4e" path="/var/lib/kubelet/pods/c79841e5-8f30-49ec-ac9f-3be1fc08db4e/volumes" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.158209 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm"] Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.161534 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.164216 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.165171 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.169688 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm"] Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.293429 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.293484 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.293526 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj687\" (UniqueName: \"kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.395184 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.395252 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.395285 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj687\" (UniqueName: \"kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.397214 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume\") pod 
\"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.400988 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.412106 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj687\" (UniqueName: \"kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687\") pod \"collect-profiles-29411445-449wm\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.488169 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:00 crc kubenswrapper[4902]: W1202 14:45:00.982932 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfef4315_d5ee_4ab6_8aef_deda4e073aec.slice/crio-a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034 WatchSource:0}: Error finding container a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034: Status 404 returned error can't find the container with id a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034 Dec 02 14:45:00 crc kubenswrapper[4902]: I1202 14:45:00.995911 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm"] Dec 02 14:45:01 crc kubenswrapper[4902]: I1202 14:45:01.191008 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" event={"ID":"dfef4315-d5ee-4ab6-8aef-deda4e073aec","Type":"ContainerStarted","Data":"a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034"} Dec 02 14:45:02 crc kubenswrapper[4902]: I1202 14:45:02.207837 4902 generic.go:334] "Generic (PLEG): container finished" podID="dfef4315-d5ee-4ab6-8aef-deda4e073aec" containerID="0457824933db1ed4853254df0ac242ed42b30057c21b151e5f86e80c4b049245" exitCode=0 Dec 02 14:45:02 crc kubenswrapper[4902]: I1202 14:45:02.208022 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" event={"ID":"dfef4315-d5ee-4ab6-8aef-deda4e073aec","Type":"ContainerDied","Data":"0457824933db1ed4853254df0ac242ed42b30057c21b151e5f86e80c4b049245"} Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.651016 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.769031 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hj687\" (UniqueName: \"kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687\") pod \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.769367 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume\") pod \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.769466 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume\") pod \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\" (UID: \"dfef4315-d5ee-4ab6-8aef-deda4e073aec\") " Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.770212 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume" (OuterVolumeSpecName: "config-volume") pod "dfef4315-d5ee-4ab6-8aef-deda4e073aec" (UID: "dfef4315-d5ee-4ab6-8aef-deda4e073aec"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.775221 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687" (OuterVolumeSpecName: "kube-api-access-hj687") pod "dfef4315-d5ee-4ab6-8aef-deda4e073aec" (UID: "dfef4315-d5ee-4ab6-8aef-deda4e073aec"). InnerVolumeSpecName "kube-api-access-hj687". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.775255 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dfef4315-d5ee-4ab6-8aef-deda4e073aec" (UID: "dfef4315-d5ee-4ab6-8aef-deda4e073aec"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.871708 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hj687\" (UniqueName: \"kubernetes.io/projected/dfef4315-d5ee-4ab6-8aef-deda4e073aec-kube-api-access-hj687\") on node \"crc\" DevicePath \"\"" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.871742 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfef4315-d5ee-4ab6-8aef-deda4e073aec-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 14:45:03 crc kubenswrapper[4902]: I1202 14:45:03.871755 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfef4315-d5ee-4ab6-8aef-deda4e073aec-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 14:45:04 crc kubenswrapper[4902]: I1202 14:45:04.107268 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:45:04 crc kubenswrapper[4902]: E1202 14:45:04.107809 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:45:04 crc kubenswrapper[4902]: I1202 14:45:04.240272 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" event={"ID":"dfef4315-d5ee-4ab6-8aef-deda4e073aec","Type":"ContainerDied","Data":"a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034"} Dec 02 14:45:04 crc kubenswrapper[4902]: I1202 14:45:04.240348 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a73082c4c50137a99ddea6fc36f500e90e93c74fa9e1abcb178db5859f868034" Dec 02 14:45:04 crc kubenswrapper[4902]: I1202 14:45:04.240374 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm" Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.042478 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6886-account-create-update-dmh6g"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.053911 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-jrhfj"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.065348 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7jf2x"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.073391 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6886-account-create-update-dmh6g"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.083853 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-jrhfj"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.092834 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-0019-account-create-update-l7vfs"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.104352 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7jf2x"] Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.118154 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a94c2aa-4287-434c-bf10-99b25a39ce73" path="/var/lib/kubelet/pods/6a94c2aa-4287-434c-bf10-99b25a39ce73/volumes" Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.119475 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f1c4d14-f9ad-4470-8165-d9c6375be0bc" path="/var/lib/kubelet/pods/7f1c4d14-f9ad-4470-8165-d9c6375be0bc/volumes" Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.120413 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="987ec49c-d8be-4d61-ba4a-7f5ee3016cf1" path="/var/lib/kubelet/pods/987ec49c-d8be-4d61-ba4a-7f5ee3016cf1/volumes" Dec 02 14:45:05 crc kubenswrapper[4902]: I1202 14:45:05.121068 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-0019-account-create-update-l7vfs"] Dec 02 14:45:06 crc kubenswrapper[4902]: I1202 14:45:06.038035 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-gs4g4"] Dec 02 14:45:06 crc kubenswrapper[4902]: I1202 14:45:06.046763 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-b438-account-create-update-9wj9w"] Dec 02 14:45:06 crc kubenswrapper[4902]: I1202 14:45:06.056897 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-b438-account-create-update-9wj9w"] Dec 02 14:45:06 crc kubenswrapper[4902]: I1202 14:45:06.065205 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-gs4g4"] Dec 02 14:45:07 crc kubenswrapper[4902]: I1202 14:45:07.118624 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d07765f-4bd5-46d0-9955-ccbd681f556b" path="/var/lib/kubelet/pods/2d07765f-4bd5-46d0-9955-ccbd681f556b/volumes" Dec 02 14:45:07 crc kubenswrapper[4902]: I1202 14:45:07.119825 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33f979ec-84eb-43b4-813a-32c7506b868f" path="/var/lib/kubelet/pods/33f979ec-84eb-43b4-813a-32c7506b868f/volumes" Dec 02 14:45:07 crc kubenswrapper[4902]: I1202 14:45:07.120515 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a752ac02-1cb9-4927-b81a-d466abb1d58e" 
path="/var/lib/kubelet/pods/a752ac02-1cb9-4927-b81a-d466abb1d58e/volumes" Dec 02 14:45:19 crc kubenswrapper[4902]: I1202 14:45:19.122968 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:45:19 crc kubenswrapper[4902]: E1202 14:45:19.124765 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.347859 4902 scope.go:117] "RemoveContainer" containerID="624cc007993e50fb3bddc9f6e14d3412b61dbec28908d37372affac18f028677" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.369364 4902 scope.go:117] "RemoveContainer" containerID="97a4f0b0f3ebfc2371d584e1bc7b312d2336cc0d91fcb33d52e89fc3fe056094" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.421776 4902 scope.go:117] "RemoveContainer" containerID="8eed2de031f89db2dc948219ba6b135869c1d9b719e7f56959140bc2587b9908" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.464652 4902 scope.go:117] "RemoveContainer" containerID="bef0336660e71ddff7d4548e11bc2d01e8096e924f34ba996f6b785e8b646e7a" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.512645 4902 scope.go:117] "RemoveContainer" containerID="697664da42902613f1144f376242de74a5779eaa479fa28ae9191d5f64159457" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.592235 4902 scope.go:117] "RemoveContainer" containerID="f304fde205272911e2bfa0eca611196be5b17575507fe9c70eeb9caed6dc9216" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.644956 4902 scope.go:117] "RemoveContainer" containerID="5c0ad0817cdf4b7b4cba6c31348a5bba5fe353b0e2796b6e6ecea5a87d5bed41" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.690168 4902 scope.go:117] "RemoveContainer" containerID="97168a240ab2094d40f6493794122d8c35cd1946080123b0a9c00989470034a3" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.725867 4902 scope.go:117] "RemoveContainer" containerID="86beb17541ea64955ebbd7d106dac218ea8d61a37f8d441e7bdde82c0d929a1b" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.744941 4902 scope.go:117] "RemoveContainer" containerID="9361276f6eeb53ae0225d4413033c14c1bcbcce210e8b6ca89d1ad06d8dc1326" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.761743 4902 scope.go:117] "RemoveContainer" containerID="ab8590788020831da214658e16998074356ce9e4d20a6dbba0527b9cd2ae7cdd" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.779651 4902 scope.go:117] "RemoveContainer" containerID="06b40d9e5bd3eaa58b90fa70c39882119c3ff48e0300d41e3da9ae046219736f" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.800502 4902 scope.go:117] "RemoveContainer" containerID="d2740e149fd66ee0ac278ba7a94ea65f4bda9a6e1e2edcc2769605cd196c2ba5" Dec 02 14:45:29 crc kubenswrapper[4902]: I1202 14:45:29.828253 4902 scope.go:117] "RemoveContainer" containerID="c9b77d35f8e607502d1ce5219844571e35718ee59b530d64130e4528ba240dec" Dec 02 14:45:31 crc kubenswrapper[4902]: I1202 14:45:31.106300 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:45:31 crc kubenswrapper[4902]: E1202 14:45:31.106739 4902 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:45:40 crc kubenswrapper[4902]: I1202 14:45:40.057130 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-8nfb5"] Dec 02 14:45:40 crc kubenswrapper[4902]: I1202 14:45:40.071850 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-8nfb5"] Dec 02 14:45:41 crc kubenswrapper[4902]: I1202 14:45:41.123555 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90848399-1e3c-448f-a1c1-8dd64a608fdb" path="/var/lib/kubelet/pods/90848399-1e3c-448f-a1c1-8dd64a608fdb/volumes" Dec 02 14:45:43 crc kubenswrapper[4902]: I1202 14:45:43.047119 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-jb52r"] Dec 02 14:45:43 crc kubenswrapper[4902]: I1202 14:45:43.061661 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-jb52r"] Dec 02 14:45:43 crc kubenswrapper[4902]: I1202 14:45:43.137088 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0782122-af51-4295-bfc8-fb4a0388cdd0" path="/var/lib/kubelet/pods/f0782122-af51-4295-bfc8-fb4a0388cdd0/volumes" Dec 02 14:45:44 crc kubenswrapper[4902]: I1202 14:45:44.106883 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:45:44 crc kubenswrapper[4902]: E1202 14:45:44.107142 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:45:56 crc kubenswrapper[4902]: I1202 14:45:56.106668 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:45:56 crc kubenswrapper[4902]: E1202 14:45:56.107521 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:46:02 crc kubenswrapper[4902]: I1202 14:46:02.042136 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-8gzqw"] Dec 02 14:46:02 crc kubenswrapper[4902]: I1202 14:46:02.054406 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-8gzqw"] Dec 02 14:46:03 crc kubenswrapper[4902]: I1202 14:46:03.128469 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7ca0ff1-b6ca-496c-b02e-71cc900b4433" path="/var/lib/kubelet/pods/a7ca0ff1-b6ca-496c-b02e-71cc900b4433/volumes" Dec 02 14:46:09 crc kubenswrapper[4902]: I1202 14:46:09.119071 4902 scope.go:117] "RemoveContainer" 
containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:46:09 crc kubenswrapper[4902]: E1202 14:46:09.120028 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:46:17 crc kubenswrapper[4902]: I1202 14:46:17.038873 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-gmq8h"] Dec 02 14:46:17 crc kubenswrapper[4902]: I1202 14:46:17.050937 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-gmq8h"] Dec 02 14:46:17 crc kubenswrapper[4902]: I1202 14:46:17.121736 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7c64325-9e55-496c-b643-2e053ee69bdb" path="/var/lib/kubelet/pods/b7c64325-9e55-496c-b643-2e053ee69bdb/volumes" Dec 02 14:46:23 crc kubenswrapper[4902]: I1202 14:46:23.106926 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:46:23 crc kubenswrapper[4902]: E1202 14:46:23.107933 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:46:25 crc kubenswrapper[4902]: I1202 14:46:25.038640 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-q9f9f"] Dec 02 14:46:25 crc kubenswrapper[4902]: I1202 14:46:25.054887 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-q9f9f"] Dec 02 14:46:25 crc kubenswrapper[4902]: I1202 14:46:25.118579 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4666a144-4ec6-43f9-b3d9-74d0bef2e9a9" path="/var/lib/kubelet/pods/4666a144-4ec6-43f9-b3d9-74d0bef2e9a9/volumes" Dec 02 14:46:28 crc kubenswrapper[4902]: I1202 14:46:28.038020 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-sr89q"] Dec 02 14:46:28 crc kubenswrapper[4902]: I1202 14:46:28.045783 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-sr89q"] Dec 02 14:46:29 crc kubenswrapper[4902]: I1202 14:46:29.119176 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082d182d-ad5d-4d70-ab12-bac95950cc73" path="/var/lib/kubelet/pods/082d182d-ad5d-4d70-ab12-bac95950cc73/volumes" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.142367 4902 scope.go:117] "RemoveContainer" containerID="06603acd7ccdef64ab15b6c4d400dd45df7d55e7572ab85d4c7e59e2e97f44d0" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.418749 4902 scope.go:117] "RemoveContainer" containerID="325ce819002deb3f3e6afc1ff534c1d379ad8f8ed5a3b731059a4296080f866f" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.474649 4902 scope.go:117] "RemoveContainer" containerID="f989e94db4d2c2cba09db820d4f65afbabc25c5810021646cdafbe57c6a7c61c" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.526763 4902 
scope.go:117] "RemoveContainer" containerID="354b6351255c643eebd7bf96c2d062773e0d2a686363e51f7b340b618afd4664" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.586242 4902 scope.go:117] "RemoveContainer" containerID="67c9aacbc2361d46791baa6754d958c3fa0c997834114937bb2c888c3eaaf2b4" Dec 02 14:46:30 crc kubenswrapper[4902]: I1202 14:46:30.612427 4902 scope.go:117] "RemoveContainer" containerID="c570b9171f63f7ba00919ddd33666fbba8786ee70486ddf871a2ec9d2310126e" Dec 02 14:46:34 crc kubenswrapper[4902]: I1202 14:46:34.619219 4902 generic.go:334] "Generic (PLEG): container finished" podID="359a7d26-5917-4e98-be9e-55f4702c2ac7" containerID="9490ea99015bcb2119f598fd0bd08267ce221259742e08b6e1324610598f8f8f" exitCode=0 Dec 02 14:46:34 crc kubenswrapper[4902]: I1202 14:46:34.619454 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" event={"ID":"359a7d26-5917-4e98-be9e-55f4702c2ac7","Type":"ContainerDied","Data":"9490ea99015bcb2119f598fd0bd08267ce221259742e08b6e1324610598f8f8f"} Dec 02 14:46:35 crc kubenswrapper[4902]: I1202 14:46:35.107844 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:46:35 crc kubenswrapper[4902]: E1202 14:46:35.108710 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.083491 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.105040 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory\") pod \"359a7d26-5917-4e98-be9e-55f4702c2ac7\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.106257 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts4d7\" (UniqueName: \"kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7\") pod \"359a7d26-5917-4e98-be9e-55f4702c2ac7\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.106406 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key\") pod \"359a7d26-5917-4e98-be9e-55f4702c2ac7\" (UID: \"359a7d26-5917-4e98-be9e-55f4702c2ac7\") " Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.115043 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7" (OuterVolumeSpecName: "kube-api-access-ts4d7") pod "359a7d26-5917-4e98-be9e-55f4702c2ac7" (UID: "359a7d26-5917-4e98-be9e-55f4702c2ac7"). InnerVolumeSpecName "kube-api-access-ts4d7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.143859 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory" (OuterVolumeSpecName: "inventory") pod "359a7d26-5917-4e98-be9e-55f4702c2ac7" (UID: "359a7d26-5917-4e98-be9e-55f4702c2ac7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.164121 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "359a7d26-5917-4e98-be9e-55f4702c2ac7" (UID: "359a7d26-5917-4e98-be9e-55f4702c2ac7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.211496 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts4d7\" (UniqueName: \"kubernetes.io/projected/359a7d26-5917-4e98-be9e-55f4702c2ac7-kube-api-access-ts4d7\") on node \"crc\" DevicePath \"\"" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.211617 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.211657 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/359a7d26-5917-4e98-be9e-55f4702c2ac7-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.650842 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" event={"ID":"359a7d26-5917-4e98-be9e-55f4702c2ac7","Type":"ContainerDied","Data":"d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa"} Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.650889 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5427f2293c9024aa8e23f57da96187c0fc557b95d71f430e9646aba9a8826aa" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.650935 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.748801 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2"] Dec 02 14:46:36 crc kubenswrapper[4902]: E1202 14:46:36.749268 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359a7d26-5917-4e98-be9e-55f4702c2ac7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.749292 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="359a7d26-5917-4e98-be9e-55f4702c2ac7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 14:46:36 crc kubenswrapper[4902]: E1202 14:46:36.749345 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfef4315-d5ee-4ab6-8aef-deda4e073aec" containerName="collect-profiles" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.749354 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfef4315-d5ee-4ab6-8aef-deda4e073aec" containerName="collect-profiles" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.749664 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="359a7d26-5917-4e98-be9e-55f4702c2ac7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.749703 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfef4315-d5ee-4ab6-8aef-deda4e073aec" containerName="collect-profiles" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.751332 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.754328 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.755119 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.755203 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.755279 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.769576 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2"] Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.823183 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.827690 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kmxg\" (UniqueName: \"kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: 
\"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.827809 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.931442 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kmxg\" (UniqueName: \"kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.932549 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.933169 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.937552 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.937780 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:36 crc kubenswrapper[4902]: I1202 14:46:36.951193 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kmxg\" (UniqueName: \"kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:37 crc kubenswrapper[4902]: I1202 14:46:37.119848 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:46:37 crc kubenswrapper[4902]: I1202 14:46:37.727398 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2"] Dec 02 14:46:38 crc kubenswrapper[4902]: I1202 14:46:38.671141 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" event={"ID":"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203","Type":"ContainerStarted","Data":"e5357370bbe406066bbc5e1bbd71b371101d52dc99a6aad035edeb01d373809a"} Dec 02 14:46:39 crc kubenswrapper[4902]: I1202 14:46:39.686850 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" event={"ID":"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203","Type":"ContainerStarted","Data":"ebdd11faf7fe3871bb69a42b7956ffa184d8998a694c20576719334dd59a472b"} Dec 02 14:46:39 crc kubenswrapper[4902]: I1202 14:46:39.721448 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" podStartSLOduration=3.098170971 podStartE2EDuration="3.721420108s" podCreationTimestamp="2025-12-02 14:46:36 +0000 UTC" firstStartedPulling="2025-12-02 14:46:37.719953806 +0000 UTC m=+1828.911262515" lastFinishedPulling="2025-12-02 14:46:38.343202933 +0000 UTC m=+1829.534511652" observedRunningTime="2025-12-02 14:46:39.715830409 +0000 UTC m=+1830.907139118" watchObservedRunningTime="2025-12-02 14:46:39.721420108 +0000 UTC m=+1830.912728857" Dec 02 14:46:46 crc kubenswrapper[4902]: I1202 14:46:46.106379 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:46:46 crc kubenswrapper[4902]: E1202 14:46:46.107293 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:46:47 crc kubenswrapper[4902]: I1202 14:46:47.043872 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-rgnm2"] Dec 02 14:46:47 crc kubenswrapper[4902]: I1202 14:46:47.056488 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-rgnm2"] Dec 02 14:46:47 crc kubenswrapper[4902]: I1202 14:46:47.123887 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52b3cd40-3726-4b52-8e64-8b15f5f02a99" path="/var/lib/kubelet/pods/52b3cd40-3726-4b52-8e64-8b15f5f02a99/volumes" Dec 02 14:46:48 crc kubenswrapper[4902]: I1202 14:46:48.025844 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7n2ss"] Dec 02 14:46:48 crc kubenswrapper[4902]: I1202 14:46:48.034290 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7n2ss"] Dec 02 14:46:49 crc kubenswrapper[4902]: I1202 14:46:49.130051 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c39a4fe-20e0-458b-a226-d6f5ad9cb846" path="/var/lib/kubelet/pods/9c39a4fe-20e0-458b-a226-d6f5ad9cb846/volumes" Dec 02 14:46:58 crc kubenswrapper[4902]: I1202 14:46:58.107523 4902 scope.go:117] "RemoveContainer" 
containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:46:58 crc kubenswrapper[4902]: E1202 14:46:58.108132 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:47:11 crc kubenswrapper[4902]: I1202 14:47:11.110217 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:47:11 crc kubenswrapper[4902]: E1202 14:47:11.111498 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.057857 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5e4d-account-create-update-xvr22"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.067604 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-5f7jd"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.079295 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5e4d-account-create-update-xvr22"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.087085 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-th6wm"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.094212 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-75k7n"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.101242 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-5f7jd"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.137091 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb390fd-b392-4ec8-990b-7ed540083355" path="/var/lib/kubelet/pods/0eb390fd-b392-4ec8-990b-7ed540083355/volumes" Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.137709 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64fa5872-7787-4029-b9ce-a33b115c5858" path="/var/lib/kubelet/pods/64fa5872-7787-4029-b9ce-a33b115c5858/volumes" Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.138283 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-75k7n"] Dec 02 14:47:21 crc kubenswrapper[4902]: I1202 14:47:21.138311 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-th6wm"] Dec 02 14:47:22 crc kubenswrapper[4902]: I1202 14:47:22.029705 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-42af-account-create-update-dg87s"] Dec 02 14:47:22 crc kubenswrapper[4902]: I1202 14:47:22.037705 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4a51-account-create-update-4fmkl"] Dec 02 14:47:22 crc kubenswrapper[4902]: I1202 14:47:22.046243 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-42af-account-create-update-dg87s"] Dec 02 14:47:22 crc kubenswrapper[4902]: I1202 14:47:22.056339 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4a51-account-create-update-4fmkl"] Dec 02 14:47:22 crc kubenswrapper[4902]: I1202 14:47:22.107975 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:47:22 crc kubenswrapper[4902]: E1202 14:47:22.108306 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:47:23 crc kubenswrapper[4902]: I1202 14:47:23.129241 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bcd69e0-8303-46f6-bef1-37faec667798" path="/var/lib/kubelet/pods/2bcd69e0-8303-46f6-bef1-37faec667798/volumes" Dec 02 14:47:23 crc kubenswrapper[4902]: I1202 14:47:23.131073 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe" path="/var/lib/kubelet/pods/5ccd2d74-fe45-4bfe-9ac4-d525c23d4fbe/volumes" Dec 02 14:47:23 crc kubenswrapper[4902]: I1202 14:47:23.132459 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da153901-c3b2-469c-944b-48209050c85c" path="/var/lib/kubelet/pods/da153901-c3b2-469c-944b-48209050c85c/volumes" Dec 02 14:47:23 crc kubenswrapper[4902]: I1202 14:47:23.134074 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4c64af0-fa3b-4607-be3b-cda80bc8831c" path="/var/lib/kubelet/pods/e4c64af0-fa3b-4607-be3b-cda80bc8831c/volumes" Dec 02 14:47:30 crc kubenswrapper[4902]: I1202 14:47:30.749056 4902 scope.go:117] "RemoveContainer" containerID="08bd7f1c99f21758ef5105c0056679303417279d87a42fb34043709597a53525" Dec 02 14:47:30 crc kubenswrapper[4902]: I1202 14:47:30.776124 4902 scope.go:117] "RemoveContainer" containerID="552dfcf87005ca59310662896ad001ab43c897dbee455e5e12df34fa48d0fa4f" Dec 02 14:47:30 crc kubenswrapper[4902]: I1202 14:47:30.832271 4902 scope.go:117] "RemoveContainer" containerID="ac2746820e93d3dca61b09cd896fe5202274313fd3939ebad5d2142e0686875d" Dec 02 14:47:30 crc kubenswrapper[4902]: I1202 14:47:30.903451 4902 scope.go:117] "RemoveContainer" containerID="b78f5a1273a9946fd83c1f5d806d27ab77dbe36d7052343a8b3e9835f6adb6bd" Dec 02 14:47:30 crc kubenswrapper[4902]: I1202 14:47:30.962304 4902 scope.go:117] "RemoveContainer" containerID="3f338b0a4fb1d24b97326ae424a40e677f52b20c69e5b9785983513d6c3d26f8" Dec 02 14:47:31 crc kubenswrapper[4902]: I1202 14:47:31.024236 4902 scope.go:117] "RemoveContainer" containerID="483a36eabcdbe2b52f7778858d305f508bfc65ff0090f681d7b97610280b4e98" Dec 02 14:47:31 crc kubenswrapper[4902]: I1202 14:47:31.080701 4902 scope.go:117] "RemoveContainer" containerID="f7048a9cf7f52b14f09d41227b59b83e7947541cc16f806fa1acf9327b70ea9a" Dec 02 14:47:31 crc kubenswrapper[4902]: I1202 14:47:31.100001 4902 scope.go:117] "RemoveContainer" containerID="3c8201f92fd1f7fd470adbe18107a7bf7b6d8b7a231eae1e82cff6b8205915ba" Dec 02 14:47:34 crc kubenswrapper[4902]: I1202 14:47:34.108282 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:47:34 crc 
kubenswrapper[4902]: E1202 14:47:34.109395 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:47:48 crc kubenswrapper[4902]: I1202 14:47:48.106736 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:47:48 crc kubenswrapper[4902]: E1202 14:47:48.107461 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:47:56 crc kubenswrapper[4902]: I1202 14:47:56.043643 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2tflw"] Dec 02 14:47:56 crc kubenswrapper[4902]: I1202 14:47:56.050989 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-2tflw"] Dec 02 14:47:57 crc kubenswrapper[4902]: I1202 14:47:57.118977 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f41756d-ba98-4fe9-9b76-7924f7879577" path="/var/lib/kubelet/pods/5f41756d-ba98-4fe9-9b76-7924f7879577/volumes" Dec 02 14:48:01 crc kubenswrapper[4902]: I1202 14:48:01.675465 4902 generic.go:334] "Generic (PLEG): container finished" podID="f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" containerID="ebdd11faf7fe3871bb69a42b7956ffa184d8998a694c20576719334dd59a472b" exitCode=0 Dec 02 14:48:01 crc kubenswrapper[4902]: I1202 14:48:01.675598 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" event={"ID":"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203","Type":"ContainerDied","Data":"ebdd11faf7fe3871bb69a42b7956ffa184d8998a694c20576719334dd59a472b"} Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.107397 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:48:03 crc kubenswrapper[4902]: E1202 14:48:03.108210 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.119643 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.299922 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory\") pod \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.300060 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kmxg\" (UniqueName: \"kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg\") pod \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.300293 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key\") pod \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\" (UID: \"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203\") " Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.315800 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg" (OuterVolumeSpecName: "kube-api-access-5kmxg") pod "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" (UID: "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203"). InnerVolumeSpecName "kube-api-access-5kmxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.329045 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory" (OuterVolumeSpecName: "inventory") pod "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" (UID: "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.332725 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" (UID: "f5c865c3-fd5d-4bc9-bf96-d1f57ff89203"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.402471 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.402501 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.402511 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kmxg\" (UniqueName: \"kubernetes.io/projected/f5c865c3-fd5d-4bc9-bf96-d1f57ff89203-kube-api-access-5kmxg\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.696594 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" event={"ID":"f5c865c3-fd5d-4bc9-bf96-d1f57ff89203","Type":"ContainerDied","Data":"e5357370bbe406066bbc5e1bbd71b371101d52dc99a6aad035edeb01d373809a"} Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.696635 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5357370bbe406066bbc5e1bbd71b371101d52dc99a6aad035edeb01d373809a" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.696617 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.786412 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4"] Dec 02 14:48:03 crc kubenswrapper[4902]: E1202 14:48:03.786880 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.786895 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.787079 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5c865c3-fd5d-4bc9-bf96-d1f57ff89203" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.787725 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.790486 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.791158 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.791201 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.798792 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.804582 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4"] Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.912242 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.912701 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:03 crc kubenswrapper[4902]: I1202 14:48:03.912947 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwpb8\" (UniqueName: \"kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.015655 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwpb8\" (UniqueName: \"kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.015889 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.016052 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.022143 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.022374 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.050434 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwpb8\" (UniqueName: \"kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.118536 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.666673 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4"] Dec 02 14:48:04 crc kubenswrapper[4902]: I1202 14:48:04.705397 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" event={"ID":"10beb027-7c11-43eb-8d82-782f41f49b1b","Type":"ContainerStarted","Data":"af1fbfa8c69087b7d502d0a230ab82d68a354177bd0a28715d70a91d39482574"} Dec 02 14:48:06 crc kubenswrapper[4902]: I1202 14:48:06.748765 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" event={"ID":"10beb027-7c11-43eb-8d82-782f41f49b1b","Type":"ContainerStarted","Data":"5b48a4cee24e637fac9ca91da8114f710abc5d7178efc280b202b418df5cb992"} Dec 02 14:48:06 crc kubenswrapper[4902]: I1202 14:48:06.775978 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" podStartSLOduration=2.907826657 podStartE2EDuration="3.77596062s" podCreationTimestamp="2025-12-02 14:48:03 +0000 UTC" firstStartedPulling="2025-12-02 14:48:04.669324981 +0000 UTC m=+1915.860633690" lastFinishedPulling="2025-12-02 14:48:05.537458944 +0000 UTC m=+1916.728767653" observedRunningTime="2025-12-02 14:48:06.775739374 +0000 UTC m=+1917.967048093" watchObservedRunningTime="2025-12-02 14:48:06.77596062 +0000 UTC m=+1917.967269329" Dec 02 14:48:11 crc kubenswrapper[4902]: I1202 14:48:11.794662 4902 generic.go:334] "Generic (PLEG): container finished" podID="10beb027-7c11-43eb-8d82-782f41f49b1b" containerID="5b48a4cee24e637fac9ca91da8114f710abc5d7178efc280b202b418df5cb992" exitCode=0 Dec 02 14:48:11 crc kubenswrapper[4902]: I1202 
14:48:11.794762 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" event={"ID":"10beb027-7c11-43eb-8d82-782f41f49b1b","Type":"ContainerDied","Data":"5b48a4cee24e637fac9ca91da8114f710abc5d7178efc280b202b418df5cb992"} Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.229720 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.408042 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwpb8\" (UniqueName: \"kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8\") pod \"10beb027-7c11-43eb-8d82-782f41f49b1b\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.408094 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key\") pod \"10beb027-7c11-43eb-8d82-782f41f49b1b\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.408344 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory\") pod \"10beb027-7c11-43eb-8d82-782f41f49b1b\" (UID: \"10beb027-7c11-43eb-8d82-782f41f49b1b\") " Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.415920 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8" (OuterVolumeSpecName: "kube-api-access-fwpb8") pod "10beb027-7c11-43eb-8d82-782f41f49b1b" (UID: "10beb027-7c11-43eb-8d82-782f41f49b1b"). InnerVolumeSpecName "kube-api-access-fwpb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.437825 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "10beb027-7c11-43eb-8d82-782f41f49b1b" (UID: "10beb027-7c11-43eb-8d82-782f41f49b1b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.472017 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory" (OuterVolumeSpecName: "inventory") pod "10beb027-7c11-43eb-8d82-782f41f49b1b" (UID: "10beb027-7c11-43eb-8d82-782f41f49b1b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.511404 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.511458 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwpb8\" (UniqueName: \"kubernetes.io/projected/10beb027-7c11-43eb-8d82-782f41f49b1b-kube-api-access-fwpb8\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.511479 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/10beb027-7c11-43eb-8d82-782f41f49b1b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.829997 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" event={"ID":"10beb027-7c11-43eb-8d82-782f41f49b1b","Type":"ContainerDied","Data":"af1fbfa8c69087b7d502d0a230ab82d68a354177bd0a28715d70a91d39482574"} Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.830045 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af1fbfa8c69087b7d502d0a230ab82d68a354177bd0a28715d70a91d39482574" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.830126 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.901505 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z"] Dec 02 14:48:13 crc kubenswrapper[4902]: E1202 14:48:13.902042 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10beb027-7c11-43eb-8d82-782f41f49b1b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.902073 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="10beb027-7c11-43eb-8d82-782f41f49b1b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.902341 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="10beb027-7c11-43eb-8d82-782f41f49b1b" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.903310 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.906781 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.907021 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.907143 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.909038 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:48:13 crc kubenswrapper[4902]: I1202 14:48:13.914921 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z"] Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.030640 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.030998 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.031030 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sm7s\" (UniqueName: \"kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.133384 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.133474 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sm7s\" (UniqueName: \"kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.133836 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: 
\"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.139308 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.141325 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.155675 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sm7s\" (UniqueName: \"kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9g5z\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.228825 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.798731 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z"] Dec 02 14:48:14 crc kubenswrapper[4902]: I1202 14:48:14.846358 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" event={"ID":"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628","Type":"ContainerStarted","Data":"f9a164dfbc8cc5533c7fba257eb23789a7c2cd3666e158840e96c10273582f97"} Dec 02 14:48:15 crc kubenswrapper[4902]: I1202 14:48:15.858736 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" event={"ID":"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628","Type":"ContainerStarted","Data":"34d284c3aedd6961be0e58a7875796abae03feb19314759106cde06477d6d0be"} Dec 02 14:48:15 crc kubenswrapper[4902]: I1202 14:48:15.875299 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" podStartSLOduration=2.375101069 podStartE2EDuration="2.87528261s" podCreationTimestamp="2025-12-02 14:48:13 +0000 UTC" firstStartedPulling="2025-12-02 14:48:14.792686172 +0000 UTC m=+1925.983994881" lastFinishedPulling="2025-12-02 14:48:15.292867673 +0000 UTC m=+1926.484176422" observedRunningTime="2025-12-02 14:48:15.873018936 +0000 UTC m=+1927.064327645" watchObservedRunningTime="2025-12-02 14:48:15.87528261 +0000 UTC m=+1927.066591319" Dec 02 14:48:18 crc kubenswrapper[4902]: I1202 14:48:18.107290 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:48:18 crc kubenswrapper[4902]: E1202 14:48:18.108022 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:48:19 crc kubenswrapper[4902]: I1202 14:48:19.046764 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-jx5xc"] Dec 02 14:48:19 crc kubenswrapper[4902]: I1202 14:48:19.054788 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-jx5xc"] Dec 02 14:48:19 crc kubenswrapper[4902]: I1202 14:48:19.116677 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="117c518f-dc8f-49d4-a389-a42e671ad97a" path="/var/lib/kubelet/pods/117c518f-dc8f-49d4-a389-a42e671ad97a/volumes" Dec 02 14:48:20 crc kubenswrapper[4902]: I1202 14:48:20.043992 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ndfkx"] Dec 02 14:48:20 crc kubenswrapper[4902]: I1202 14:48:20.052535 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ndfkx"] Dec 02 14:48:21 crc kubenswrapper[4902]: I1202 14:48:21.118649 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446" path="/var/lib/kubelet/pods/d9bbcd88-9d5a-48e0-8dbd-0ac1e0954446/volumes" Dec 02 14:48:31 crc kubenswrapper[4902]: I1202 14:48:31.580939 4902 patch_prober.go:28] interesting pod/observability-operator-d8bb48f5d-tghq9 container/operator namespace/openshift-operators: Liveness probe status=failure output="Get \"http://10.217.0.30:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 14:48:31 crc kubenswrapper[4902]: I1202 14:48:31.581733 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operators/observability-operator-d8bb48f5d-tghq9" podUID="c7fe3eed-6ecf-406a-9552-4f2a601eb860" containerName="operator" probeResult="failure" output="Get \"http://10.217.0.30:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 14:48:31 crc kubenswrapper[4902]: I1202 14:48:31.964262 4902 scope.go:117] "RemoveContainer" containerID="b68210ce3c87dd829683b4c920f83244c49db2ba9db82ede14962ef90911d1e1" Dec 02 14:48:31 crc kubenswrapper[4902]: I1202 14:48:31.986094 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:48:31 crc kubenswrapper[4902]: E1202 14:48:31.986981 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:48:32 crc kubenswrapper[4902]: I1202 14:48:32.194969 4902 scope.go:117] "RemoveContainer" containerID="be48170d4b7c7fea223089a271804833260148431c52db31a0d5c5cff2a53a26" Dec 02 14:48:32 crc kubenswrapper[4902]: I1202 14:48:32.285822 4902 scope.go:117] "RemoveContainer" containerID="d7b951b59f2ae69c176ac3600b2428084bc60abc8ac64947ff54327ed0c426b0" Dec 02 14:48:47 crc kubenswrapper[4902]: I1202 14:48:47.107881 4902 scope.go:117] "RemoveContainer" 
containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:48:47 crc kubenswrapper[4902]: E1202 14:48:47.108581 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:49:00 crc kubenswrapper[4902]: I1202 14:49:00.364183 4902 generic.go:334] "Generic (PLEG): container finished" podID="0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" containerID="34d284c3aedd6961be0e58a7875796abae03feb19314759106cde06477d6d0be" exitCode=0 Dec 02 14:49:00 crc kubenswrapper[4902]: I1202 14:49:00.364836 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" event={"ID":"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628","Type":"ContainerDied","Data":"34d284c3aedd6961be0e58a7875796abae03feb19314759106cde06477d6d0be"} Dec 02 14:49:01 crc kubenswrapper[4902]: I1202 14:49:01.107984 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:49:01 crc kubenswrapper[4902]: E1202 14:49:01.109199 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:49:01 crc kubenswrapper[4902]: I1202 14:49:01.854067 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.001108 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sm7s\" (UniqueName: \"kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s\") pod \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.001723 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory\") pod \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.001814 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key\") pod \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\" (UID: \"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628\") " Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.006651 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s" (OuterVolumeSpecName: "kube-api-access-7sm7s") pod "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" (UID: "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628"). InnerVolumeSpecName "kube-api-access-7sm7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.028454 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" (UID: "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.032736 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory" (OuterVolumeSpecName: "inventory") pod "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" (UID: "0cb0ec2d-c764-45bf-a7e7-02d3f75d2628"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.105125 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sm7s\" (UniqueName: \"kubernetes.io/projected/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-kube-api-access-7sm7s\") on node \"crc\" DevicePath \"\"" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.105158 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.105169 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0cb0ec2d-c764-45bf-a7e7-02d3f75d2628-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.387208 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" event={"ID":"0cb0ec2d-c764-45bf-a7e7-02d3f75d2628","Type":"ContainerDied","Data":"f9a164dfbc8cc5533c7fba257eb23789a7c2cd3666e158840e96c10273582f97"} Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.387251 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9a164dfbc8cc5533c7fba257eb23789a7c2cd3666e158840e96c10273582f97" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.387291 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9g5z" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.487314 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"] Dec 02 14:49:02 crc kubenswrapper[4902]: E1202 14:49:02.487867 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.487897 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.488157 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cb0ec2d-c764-45bf-a7e7-02d3f75d2628" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.489069 4902 util.go:30] "No sandbox for pod can be found. 
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.493127 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.493206 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.493326 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.493739 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.505215 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"]
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.619467 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.619529 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2x76\" (UniqueName: \"kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.619749 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.720965 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.721047 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.721077 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2x76\" (UniqueName: \"kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" 
(UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.725166 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.725451 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.749259 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2x76\" (UniqueName: \"kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:49:02 crc kubenswrapper[4902]: I1202 14:49:02.822389 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:49:03 crc kubenswrapper[4902]: I1202 14:49:03.349294 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"] Dec 02 14:49:03 crc kubenswrapper[4902]: I1202 14:49:03.355144 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:49:03 crc kubenswrapper[4902]: I1202 14:49:03.396358 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" event={"ID":"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3","Type":"ContainerStarted","Data":"e9e0cbe1d2b137b7ce49bb991a707afcf92de368698d7ec856f392767615dd83"} Dec 02 14:49:04 crc kubenswrapper[4902]: I1202 14:49:04.042275 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-r24tk"] Dec 02 14:49:04 crc kubenswrapper[4902]: I1202 14:49:04.053951 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-r24tk"] Dec 02 14:49:05 crc kubenswrapper[4902]: I1202 14:49:05.122283 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a" path="/var/lib/kubelet/pods/0cbd9373-5a7f-4bfa-90e9-b3e2ad7e763a/volumes" Dec 02 14:49:05 crc kubenswrapper[4902]: I1202 14:49:05.423025 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" event={"ID":"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3","Type":"ContainerStarted","Data":"2f7d5bebbf2d568991e18b1ad114e94b09f938b84ac68f02a4a0e14194cf11f4"} Dec 02 14:49:05 crc kubenswrapper[4902]: I1202 14:49:05.448339 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" podStartSLOduration=2.49455222 podStartE2EDuration="3.448316357s" podCreationTimestamp="2025-12-02 14:49:02 +0000 
UTC" firstStartedPulling="2025-12-02 14:49:03.35272449 +0000 UTC m=+1974.544033199" lastFinishedPulling="2025-12-02 14:49:04.306488617 +0000 UTC m=+1975.497797336" observedRunningTime="2025-12-02 14:49:05.441443721 +0000 UTC m=+1976.632752430" watchObservedRunningTime="2025-12-02 14:49:05.448316357 +0000 UTC m=+1976.639625076" Dec 02 14:49:14 crc kubenswrapper[4902]: I1202 14:49:14.106777 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:49:14 crc kubenswrapper[4902]: E1202 14:49:14.107548 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:49:27 crc kubenswrapper[4902]: I1202 14:49:27.106597 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:49:27 crc kubenswrapper[4902]: E1202 14:49:27.107520 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:49:32 crc kubenswrapper[4902]: I1202 14:49:32.443547 4902 scope.go:117] "RemoveContainer" containerID="1aadb9450c881314ebb72943ddf6f16dd075cdc525183bbeac66aacae169c39b" Dec 02 14:49:41 crc kubenswrapper[4902]: I1202 14:49:41.108013 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f" Dec 02 14:49:41 crc kubenswrapper[4902]: I1202 14:49:41.812436 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f"} Dec 02 14:50:01 crc kubenswrapper[4902]: I1202 14:50:01.021416 4902 generic.go:334] "Generic (PLEG): container finished" podID="c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" containerID="2f7d5bebbf2d568991e18b1ad114e94b09f938b84ac68f02a4a0e14194cf11f4" exitCode=0 Dec 02 14:50:01 crc kubenswrapper[4902]: I1202 14:50:01.021515 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" event={"ID":"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3","Type":"ContainerDied","Data":"2f7d5bebbf2d568991e18b1ad114e94b09f938b84ac68f02a4a0e14194cf11f4"} Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.476167 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5"
Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.640963 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key\") pod \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") "
Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.641100 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2x76\" (UniqueName: \"kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76\") pod \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") "
Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.641194 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") pod \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") "
Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.646699 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76" (OuterVolumeSpecName: "kube-api-access-f2x76") pod "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" (UID: "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3"). InnerVolumeSpecName "kube-api-access-f2x76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:50:02 crc kubenswrapper[4902]: E1202 14:50:02.671685 4902 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory podName:c491ad4c-9428-41e3-8c4d-bcd59ff50ca3 nodeName:}" failed. No retries permitted until 2025-12-02 14:50:03.171659856 +0000 UTC m=+2034.362968565 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory") pod "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" (UID: "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3") : error deleting /var/lib/kubelet/pods/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3/volume-subpaths: remove /var/lib/kubelet/pods/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3/volume-subpaths: no such file or directory
Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.676248 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" (UID: "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
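The nestedpendingoperations error above is notable: the 500ms retry is triggered only because deleting an already-absent volume-subpaths directory returned "no such file or directory", which is harmless for cleanup. A minimal Go sketch of ENOENT-tolerant removal (the path below is illustrative, not copied from a live node):

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

// cleanupSubpaths removes a per-pod subpath directory, treating
// "already gone" as success so no retry cycle is needed.
func cleanupSubpaths(dir string) error {
	if err := os.Remove(dir); err != nil && !errors.Is(err, fs.ErrNotExist) {
		return fmt.Errorf("error deleting %s: %w", dir, err)
	}
	return nil
}

func main() {
	// A directory that does not exist: cleanup still reports success.
	if err := cleanupSubpaths("/tmp/no-such-volume-subpaths"); err != nil {
		fmt.Println("cleanup failed:", err)
		return
	}
	fmt.Println("cleanup ok (missing directory tolerated)")
}

In the log the retry does fire, but the inventory volume unmounts cleanly on the next pass at 14:50:03.257896 below, so the error is transient noise rather than a stuck teardown.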
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.743234 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:02 crc kubenswrapper[4902]: I1202 14:50:02.743263 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2x76\" (UniqueName: \"kubernetes.io/projected/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-kube-api-access-f2x76\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.042346 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" event={"ID":"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3","Type":"ContainerDied","Data":"e9e0cbe1d2b137b7ce49bb991a707afcf92de368698d7ec856f392767615dd83"} Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.042775 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9e0cbe1d2b137b7ce49bb991a707afcf92de368698d7ec856f392767615dd83" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.042396 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.134318 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jgmdd"] Dec 02 14:50:03 crc kubenswrapper[4902]: E1202 14:50:03.135107 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.135128 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.135338 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.135995 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.186144 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jgmdd"] Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.251695 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") pod \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\" (UID: \"c491ad4c-9428-41e3-8c4d-bcd59ff50ca3\") " Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.252193 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.252233 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km77m\" (UniqueName: \"kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.252346 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.257896 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory" (OuterVolumeSpecName: "inventory") pod "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3" (UID: "c491ad4c-9428-41e3-8c4d-bcd59ff50ca3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.354390 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.354461 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km77m\" (UniqueName: \"kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.354545 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.354648 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c491ad4c-9428-41e3-8c4d-bcd59ff50ca3-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.358189 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.358332 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.372264 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km77m\" (UniqueName: \"kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m\") pod \"ssh-known-hosts-edpm-deployment-jgmdd\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") " pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.454045 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd"
Dec 02 14:50:03 crc kubenswrapper[4902]: I1202 14:50:03.997718 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jgmdd"]
Dec 02 14:50:04 crc kubenswrapper[4902]: I1202 14:50:04.055947 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" event={"ID":"77add5e2-5a0c-4a99-a46e-80645017ee83","Type":"ContainerStarted","Data":"d3314128e6a162dc8b1eb0e353cd141292cbee2c2516f5147975b1e2e791a7e2"}
Dec 02 14:50:07 crc kubenswrapper[4902]: I1202 14:50:07.081186 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" event={"ID":"77add5e2-5a0c-4a99-a46e-80645017ee83","Type":"ContainerStarted","Data":"a52ec78fb6cffcc887562492674b6d138e98f497fab9019f5ab8a86c442f9249"}
Dec 02 14:50:07 crc kubenswrapper[4902]: I1202 14:50:07.103773 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" podStartSLOduration=1.777329328 podStartE2EDuration="4.103549887s" podCreationTimestamp="2025-12-02 14:50:03 +0000 UTC" firstStartedPulling="2025-12-02 14:50:04.002017361 +0000 UTC m=+2035.193326070" lastFinishedPulling="2025-12-02 14:50:06.3282379 +0000 UTC m=+2037.519546629" observedRunningTime="2025-12-02 14:50:07.096328861 +0000 UTC m=+2038.287637570" watchObservedRunningTime="2025-12-02 14:50:07.103549887 +0000 UTC m=+2038.294858596"
Dec 02 14:50:14 crc kubenswrapper[4902]: I1202 14:50:14.147357 4902 generic.go:334] "Generic (PLEG): container finished" podID="77add5e2-5a0c-4a99-a46e-80645017ee83" containerID="a52ec78fb6cffcc887562492674b6d138e98f497fab9019f5ab8a86c442f9249" exitCode=0
Dec 02 14:50:14 crc kubenswrapper[4902]: I1202 14:50:14.148220 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" event={"ID":"77add5e2-5a0c-4a99-a46e-80645017ee83","Type":"ContainerDied","Data":"a52ec78fb6cffcc887562492674b6d138e98f497fab9019f5ab8a86c442f9249"}
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.845950 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd"
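Across this whole section the EDPM dataplane pods (install-os, configure-os, ssh-known-hosts, run-os, ...) follow one run-to-completion pattern: the container starts, exits 0, the PLEG reports ContainerDied, volumes are torn down, and the operator creates the next step's pod. A toy event loop in the spirit of those PLEG events; the event struct is invented for illustration and is not kubelet's real type:

package main

import "fmt"

type plegEvent struct {
	Pod      string
	Type     string // "ContainerStarted" or "ContainerDied"
	ExitCode int    // meaningful only for ContainerDied
}

func main() {
	events := []plegEvent{
		{"configure-os-edpm-deployment", "ContainerStarted", 0},
		{"configure-os-edpm-deployment", "ContainerDied", 0},
		{"ssh-known-hosts-edpm-deployment", "ContainerStarted", 0},
		{"ssh-known-hosts-edpm-deployment", "ContainerDied", 0},
	}
	for _, e := range events {
		if e.Type != "ContainerDied" {
			continue
		}
		if e.ExitCode == 0 {
			fmt.Printf("%s: completed; tear down volumes, next step may start\n", e.Pod)
		} else {
			fmt.Printf("%s: failed with exit code %d\n", e.Pod, e.ExitCode)
		}
	}
}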
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.955677 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km77m\" (UniqueName: \"kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m\") pod \"77add5e2-5a0c-4a99-a46e-80645017ee83\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") "
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.955931 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam\") pod \"77add5e2-5a0c-4a99-a46e-80645017ee83\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") "
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.956037 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0\") pod \"77add5e2-5a0c-4a99-a46e-80645017ee83\" (UID: \"77add5e2-5a0c-4a99-a46e-80645017ee83\") "
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.960930 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m" (OuterVolumeSpecName: "kube-api-access-km77m") pod "77add5e2-5a0c-4a99-a46e-80645017ee83" (UID: "77add5e2-5a0c-4a99-a46e-80645017ee83"). InnerVolumeSpecName "kube-api-access-km77m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.982505 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "77add5e2-5a0c-4a99-a46e-80645017ee83" (UID: "77add5e2-5a0c-4a99-a46e-80645017ee83"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 14:50:15 crc kubenswrapper[4902]: I1202 14:50:15.982531 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "77add5e2-5a0c-4a99-a46e-80645017ee83" (UID: "77add5e2-5a0c-4a99-a46e-80645017ee83"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.057932 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.057969 4902 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/77add5e2-5a0c-4a99-a46e-80645017ee83-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.057979 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km77m\" (UniqueName: \"kubernetes.io/projected/77add5e2-5a0c-4a99-a46e-80645017ee83-kube-api-access-km77m\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.176773 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" event={"ID":"77add5e2-5a0c-4a99-a46e-80645017ee83","Type":"ContainerDied","Data":"d3314128e6a162dc8b1eb0e353cd141292cbee2c2516f5147975b1e2e791a7e2"} Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.176825 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3314128e6a162dc8b1eb0e353cd141292cbee2c2516f5147975b1e2e791a7e2" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.176894 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jgmdd" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.260322 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f"] Dec 02 14:50:16 crc kubenswrapper[4902]: E1202 14:50:16.260843 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77add5e2-5a0c-4a99-a46e-80645017ee83" containerName="ssh-known-hosts-edpm-deployment" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.260866 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="77add5e2-5a0c-4a99-a46e-80645017ee83" containerName="ssh-known-hosts-edpm-deployment" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.261135 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="77add5e2-5a0c-4a99-a46e-80645017ee83" containerName="ssh-known-hosts-edpm-deployment" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.261815 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.264082 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.264094 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.264352 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.264497 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.274264 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f"] Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.361951 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.362066 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.362102 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hffm\" (UniqueName: \"kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.463823 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.463874 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hffm\" (UniqueName: \"kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.463991 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.468595 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.468650 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.481708 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hffm\" (UniqueName: \"kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4zh2f\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:16 crc kubenswrapper[4902]: I1202 14:50:16.595932 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:17 crc kubenswrapper[4902]: W1202 14:50:17.214912 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c20b53d_a975_44ae_8ec3_126956995caf.slice/crio-2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550 WatchSource:0}: Error finding container 2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550: Status 404 returned error can't find the container with id 2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550 Dec 02 14:50:17 crc kubenswrapper[4902]: I1202 14:50:17.217414 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f"] Dec 02 14:50:18 crc kubenswrapper[4902]: I1202 14:50:18.212484 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" event={"ID":"2c20b53d-a975-44ae-8ec3-126956995caf","Type":"ContainerStarted","Data":"2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550"} Dec 02 14:50:19 crc kubenswrapper[4902]: I1202 14:50:19.223921 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" event={"ID":"2c20b53d-a975-44ae-8ec3-126956995caf","Type":"ContainerStarted","Data":"07ac4d7c60f52f9d46bf0613cfd44713f54cd2d140d5484c3b0a1aadcb64b696"} Dec 02 14:50:19 crc kubenswrapper[4902]: I1202 14:50:19.244238 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" podStartSLOduration=2.373163731 podStartE2EDuration="3.244215507s" podCreationTimestamp="2025-12-02 14:50:16 +0000 UTC" firstStartedPulling="2025-12-02 14:50:17.217305632 +0000 UTC m=+2048.408614341" lastFinishedPulling="2025-12-02 14:50:18.088357408 +0000 UTC m=+2049.279666117" observedRunningTime="2025-12-02 14:50:19.239294797 +0000 UTC m=+2050.430603526" watchObservedRunningTime="2025-12-02 14:50:19.244215507 +0000 UTC 
m=+2050.435524216" Dec 02 14:50:27 crc kubenswrapper[4902]: I1202 14:50:27.298777 4902 generic.go:334] "Generic (PLEG): container finished" podID="2c20b53d-a975-44ae-8ec3-126956995caf" containerID="07ac4d7c60f52f9d46bf0613cfd44713f54cd2d140d5484c3b0a1aadcb64b696" exitCode=0 Dec 02 14:50:27 crc kubenswrapper[4902]: I1202 14:50:27.298878 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" event={"ID":"2c20b53d-a975-44ae-8ec3-126956995caf","Type":"ContainerDied","Data":"07ac4d7c60f52f9d46bf0613cfd44713f54cd2d140d5484c3b0a1aadcb64b696"} Dec 02 14:50:28 crc kubenswrapper[4902]: I1202 14:50:28.768650 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:28 crc kubenswrapper[4902]: I1202 14:50:28.956457 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key\") pod \"2c20b53d-a975-44ae-8ec3-126956995caf\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " Dec 02 14:50:28 crc kubenswrapper[4902]: I1202 14:50:28.956550 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory\") pod \"2c20b53d-a975-44ae-8ec3-126956995caf\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " Dec 02 14:50:28 crc kubenswrapper[4902]: I1202 14:50:28.956736 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hffm\" (UniqueName: \"kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm\") pod \"2c20b53d-a975-44ae-8ec3-126956995caf\" (UID: \"2c20b53d-a975-44ae-8ec3-126956995caf\") " Dec 02 14:50:28 crc kubenswrapper[4902]: I1202 14:50:28.963985 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm" (OuterVolumeSpecName: "kube-api-access-4hffm") pod "2c20b53d-a975-44ae-8ec3-126956995caf" (UID: "2c20b53d-a975-44ae-8ec3-126956995caf"). InnerVolumeSpecName "kube-api-access-4hffm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.008646 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory" (OuterVolumeSpecName: "inventory") pod "2c20b53d-a975-44ae-8ec3-126956995caf" (UID: "2c20b53d-a975-44ae-8ec3-126956995caf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.011204 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2c20b53d-a975-44ae-8ec3-126956995caf" (UID: "2c20b53d-a975-44ae-8ec3-126956995caf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.060174 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.060230 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c20b53d-a975-44ae-8ec3-126956995caf-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.060252 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hffm\" (UniqueName: \"kubernetes.io/projected/2c20b53d-a975-44ae-8ec3-126956995caf-kube-api-access-4hffm\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.324716 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" event={"ID":"2c20b53d-a975-44ae-8ec3-126956995caf","Type":"ContainerDied","Data":"2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550"} Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.324751 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2208e71a2b9c131a9633ea3c9259ee7b4766c300dd053f989c159c92f10cf550" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.324803 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4zh2f" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.424000 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf"] Dec 02 14:50:29 crc kubenswrapper[4902]: E1202 14:50:29.424444 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c20b53d-a975-44ae-8ec3-126956995caf" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.424465 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c20b53d-a975-44ae-8ec3-126956995caf" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.424761 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c20b53d-a975-44ae-8ec3-126956995caf" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.425545 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.428534 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.428624 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.430362 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.434172 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.440966 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf"] Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.570485 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvz8l\" (UniqueName: \"kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.570617 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.570815 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.673925 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvz8l\" (UniqueName: \"kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.673996 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.674079 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: 
\"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.679947 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.680003 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.693877 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvz8l\" (UniqueName: \"kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:29 crc kubenswrapper[4902]: I1202 14:50:29.783288 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:30 crc kubenswrapper[4902]: I1202 14:50:30.316921 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf"] Dec 02 14:50:30 crc kubenswrapper[4902]: I1202 14:50:30.333980 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" event={"ID":"0327d28d-2fe0-4940-b8b7-5a805b1d89bc","Type":"ContainerStarted","Data":"dd8e4a5204859372ac95b71b0a0cbfc291d27667cdf0fe6e35e2faa06df2a8ba"} Dec 02 14:50:32 crc kubenswrapper[4902]: I1202 14:50:32.364654 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" event={"ID":"0327d28d-2fe0-4940-b8b7-5a805b1d89bc","Type":"ContainerStarted","Data":"b7f19ed7f5f3a1d923e252b62dab89368a318e28362b65f1473a1e4349716222"} Dec 02 14:50:32 crc kubenswrapper[4902]: I1202 14:50:32.389243 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" podStartSLOduration=2.603669492 podStartE2EDuration="3.38922356s" podCreationTimestamp="2025-12-02 14:50:29 +0000 UTC" firstStartedPulling="2025-12-02 14:50:30.319814768 +0000 UTC m=+2061.511123487" lastFinishedPulling="2025-12-02 14:50:31.105368836 +0000 UTC m=+2062.296677555" observedRunningTime="2025-12-02 14:50:32.382276043 +0000 UTC m=+2063.573584762" watchObservedRunningTime="2025-12-02 14:50:32.38922356 +0000 UTC m=+2063.580532269" Dec 02 14:50:42 crc kubenswrapper[4902]: I1202 14:50:42.477284 4902 generic.go:334] "Generic (PLEG): container finished" podID="0327d28d-2fe0-4940-b8b7-5a805b1d89bc" containerID="b7f19ed7f5f3a1d923e252b62dab89368a318e28362b65f1473a1e4349716222" exitCode=0 Dec 02 14:50:42 crc kubenswrapper[4902]: I1202 14:50:42.477399 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" 
event={"ID":"0327d28d-2fe0-4940-b8b7-5a805b1d89bc","Type":"ContainerDied","Data":"b7f19ed7f5f3a1d923e252b62dab89368a318e28362b65f1473a1e4349716222"} Dec 02 14:50:43 crc kubenswrapper[4902]: I1202 14:50:43.926489 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:43 crc kubenswrapper[4902]: I1202 14:50:43.977451 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvz8l\" (UniqueName: \"kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l\") pod \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " Dec 02 14:50:43 crc kubenswrapper[4902]: I1202 14:50:43.978258 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory\") pod \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " Dec 02 14:50:43 crc kubenswrapper[4902]: I1202 14:50:43.978732 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key\") pod \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\" (UID: \"0327d28d-2fe0-4940-b8b7-5a805b1d89bc\") " Dec 02 14:50:43 crc kubenswrapper[4902]: I1202 14:50:43.986989 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l" (OuterVolumeSpecName: "kube-api-access-cvz8l") pod "0327d28d-2fe0-4940-b8b7-5a805b1d89bc" (UID: "0327d28d-2fe0-4940-b8b7-5a805b1d89bc"). InnerVolumeSpecName "kube-api-access-cvz8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.010706 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory" (OuterVolumeSpecName: "inventory") pod "0327d28d-2fe0-4940-b8b7-5a805b1d89bc" (UID: "0327d28d-2fe0-4940-b8b7-5a805b1d89bc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.019727 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0327d28d-2fe0-4940-b8b7-5a805b1d89bc" (UID: "0327d28d-2fe0-4940-b8b7-5a805b1d89bc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.083443 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.083495 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.083514 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvz8l\" (UniqueName: \"kubernetes.io/projected/0327d28d-2fe0-4940-b8b7-5a805b1d89bc-kube-api-access-cvz8l\") on node \"crc\" DevicePath \"\"" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.557945 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" event={"ID":"0327d28d-2fe0-4940-b8b7-5a805b1d89bc","Type":"ContainerDied","Data":"dd8e4a5204859372ac95b71b0a0cbfc291d27667cdf0fe6e35e2faa06df2a8ba"} Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.558226 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd8e4a5204859372ac95b71b0a0cbfc291d27667cdf0fe6e35e2faa06df2a8ba" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.558085 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.611183 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp"] Dec 02 14:50:44 crc kubenswrapper[4902]: E1202 14:50:44.611686 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0327d28d-2fe0-4940-b8b7-5a805b1d89bc" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.611712 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0327d28d-2fe0-4940-b8b7-5a805b1d89bc" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.611956 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0327d28d-2fe0-4940-b8b7-5a805b1d89bc" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.612784 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.615475 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.616998 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.617185 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.617477 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.617489 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.617683 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.618209 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.619016 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.651403 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp"] Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694642 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694707 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694741 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694812 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694868 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694895 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.694923 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695060 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695182 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695245 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695271 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbrt9\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695350 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695409 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.695469 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797297 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797408 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797451 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797489 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc 
kubenswrapper[4902]: I1202 14:50:44.797528 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797590 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797646 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797697 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797800 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797884 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797923 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbrt9\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.797979 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.798037 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.798094 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.803122 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.806116 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.807068 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.807261 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.807386 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc 
kubenswrapper[4902]: I1202 14:50:44.807447 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.807490 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.809246 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.810988 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.811226 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.811788 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.812739 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.815022 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: 
\"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.824167 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbrt9\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:44 crc kubenswrapper[4902]: I1202 14:50:44.951276 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:50:45 crc kubenswrapper[4902]: I1202 14:50:45.525932 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp"] Dec 02 14:50:45 crc kubenswrapper[4902]: W1202 14:50:45.527859 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53ecd2a3_800b_4718_bf18_8b77eb9bbbe8.slice/crio-52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb WatchSource:0}: Error finding container 52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb: Status 404 returned error can't find the container with id 52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb Dec 02 14:50:45 crc kubenswrapper[4902]: I1202 14:50:45.568955 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" event={"ID":"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8","Type":"ContainerStarted","Data":"52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb"} Dec 02 14:50:47 crc kubenswrapper[4902]: I1202 14:50:47.597178 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" event={"ID":"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8","Type":"ContainerStarted","Data":"c2cc5c6cb265780fc3f32c98f13d6fa3e6c31468d655cd227580191a41d2e1b2"} Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.531670 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" podStartSLOduration=30.980076509 podStartE2EDuration="31.53165019s" podCreationTimestamp="2025-12-02 14:50:44 +0000 UTC" firstStartedPulling="2025-12-02 14:50:45.529888892 +0000 UTC m=+2076.721197621" lastFinishedPulling="2025-12-02 14:50:46.081462553 +0000 UTC m=+2077.272771302" observedRunningTime="2025-12-02 14:50:47.635099847 +0000 UTC m=+2078.826408626" watchObservedRunningTime="2025-12-02 14:51:15.53165019 +0000 UTC m=+2106.722958899" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.538972 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.541214 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.558023 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.639507 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.639663 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.639883 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkfgh\" (UniqueName: \"kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.741375 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.741452 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.741549 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkfgh\" (UniqueName: \"kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.742241 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.742303 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.761758 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vkfgh\" (UniqueName: \"kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh\") pod \"redhat-operators-rzgk6\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:15 crc kubenswrapper[4902]: I1202 14:51:15.873544 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:16 crc kubenswrapper[4902]: I1202 14:51:16.335710 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:16 crc kubenswrapper[4902]: W1202 14:51:16.335926 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a8a6082_fbd2_43c0_acd3_b251c589ed0f.slice/crio-77bb0d4b68d7ee23b4bcdbf7f757e736c30b155faf592de1e76cf52f8be72227 WatchSource:0}: Error finding container 77bb0d4b68d7ee23b4bcdbf7f757e736c30b155faf592de1e76cf52f8be72227: Status 404 returned error can't find the container with id 77bb0d4b68d7ee23b4bcdbf7f757e736c30b155faf592de1e76cf52f8be72227 Dec 02 14:51:16 crc kubenswrapper[4902]: I1202 14:51:16.910292 4902 generic.go:334] "Generic (PLEG): container finished" podID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerID="48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248" exitCode=0 Dec 02 14:51:16 crc kubenswrapper[4902]: I1202 14:51:16.910578 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerDied","Data":"48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248"} Dec 02 14:51:16 crc kubenswrapper[4902]: I1202 14:51:16.910609 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerStarted","Data":"77bb0d4b68d7ee23b4bcdbf7f757e736c30b155faf592de1e76cf52f8be72227"} Dec 02 14:51:18 crc kubenswrapper[4902]: I1202 14:51:18.932006 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerStarted","Data":"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d"} Dec 02 14:51:20 crc kubenswrapper[4902]: I1202 14:51:20.956441 4902 generic.go:334] "Generic (PLEG): container finished" podID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerID="0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d" exitCode=0 Dec 02 14:51:20 crc kubenswrapper[4902]: I1202 14:51:20.956533 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerDied","Data":"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d"} Dec 02 14:51:23 crc kubenswrapper[4902]: I1202 14:51:23.986339 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerStarted","Data":"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd"} Dec 02 14:51:24 crc kubenswrapper[4902]: I1202 14:51:24.025911 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rzgk6" podStartSLOduration=2.877959798 podStartE2EDuration="9.025893192s" 
podCreationTimestamp="2025-12-02 14:51:15 +0000 UTC" firstStartedPulling="2025-12-02 14:51:16.912728647 +0000 UTC m=+2108.104037356" lastFinishedPulling="2025-12-02 14:51:23.060662041 +0000 UTC m=+2114.251970750" observedRunningTime="2025-12-02 14:51:24.004045042 +0000 UTC m=+2115.195353781" watchObservedRunningTime="2025-12-02 14:51:24.025893192 +0000 UTC m=+2115.217201901" Dec 02 14:51:25 crc kubenswrapper[4902]: I1202 14:51:25.874607 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:25 crc kubenswrapper[4902]: I1202 14:51:25.875264 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:26 crc kubenswrapper[4902]: I1202 14:51:26.929500 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rzgk6" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="registry-server" probeResult="failure" output=< Dec 02 14:51:26 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 14:51:26 crc kubenswrapper[4902]: > Dec 02 14:51:28 crc kubenswrapper[4902]: I1202 14:51:28.027547 4902 generic.go:334] "Generic (PLEG): container finished" podID="53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" containerID="c2cc5c6cb265780fc3f32c98f13d6fa3e6c31468d655cd227580191a41d2e1b2" exitCode=0 Dec 02 14:51:28 crc kubenswrapper[4902]: I1202 14:51:28.027614 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" event={"ID":"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8","Type":"ContainerDied","Data":"c2cc5c6cb265780fc3f32c98f13d6fa3e6c31468d655cd227580191a41d2e1b2"} Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.456371 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578412 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578474 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578512 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578531 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbrt9\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578585 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578614 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578744 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578762 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578831 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " 
Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578880 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578894 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578944 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.578967 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.579000 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\" (UID: \"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8\") " Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.586375 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.588411 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.588505 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9" (OuterVolumeSpecName: "kube-api-access-pbrt9") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "kube-api-access-pbrt9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589033 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589118 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589138 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589165 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589869 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.589911 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.590055 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.590706 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.591085 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.616300 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory" (OuterVolumeSpecName: "inventory") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.624390 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" (UID: "53ecd2a3-800b-4718-bf18-8b77eb9bbbe8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680911 4902 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680949 4902 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680962 4902 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680973 4902 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680987 4902 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.680999 4902 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-repo-setup-combined-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681014 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681028 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681042 4902 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681053 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681065 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681078 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbrt9\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-kube-api-access-pbrt9\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681090 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:29 crc kubenswrapper[4902]: I1202 14:51:29.681102 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53ecd2a3-800b-4718-bf18-8b77eb9bbbe8-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.052826 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" event={"ID":"53ecd2a3-800b-4718-bf18-8b77eb9bbbe8","Type":"ContainerDied","Data":"52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb"} Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.052899 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52f139bde19fe481004977716505ed049d834ff13f9e95b8133404b79ac30ecb" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.052902 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.165632 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9"] Dec 02 14:51:30 crc kubenswrapper[4902]: E1202 14:51:30.166047 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.166073 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.166338 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="53ecd2a3-800b-4718-bf18-8b77eb9bbbe8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.167254 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.170664 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.171116 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.171282 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.171433 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.171615 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.177991 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9"] Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.296475 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9rpw\" (UniqueName: \"kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.296538 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.296678 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.296792 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.296842 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.398401 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.398530 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.398684 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9rpw\" (UniqueName: \"kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.398871 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.399243 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.400129 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.403113 4902 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.403818 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.406887 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.422230 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9rpw\" (UniqueName: \"kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qfls9\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:30 crc kubenswrapper[4902]: I1202 14:51:30.496395 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:51:31 crc kubenswrapper[4902]: I1202 14:51:31.020573 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9"] Dec 02 14:51:31 crc kubenswrapper[4902]: I1202 14:51:31.061343 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" event={"ID":"af830218-8303-46c8-a31a-a33fa89d0034","Type":"ContainerStarted","Data":"74f060757f48cd7e90917462cb62c90b718df919b3f03f6f55975bcb9ed79222"} Dec 02 14:51:32 crc kubenswrapper[4902]: I1202 14:51:32.072296 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" event={"ID":"af830218-8303-46c8-a31a-a33fa89d0034","Type":"ContainerStarted","Data":"129c8ab493cce7c274090b1d4a1d0239e8794de0df6ba72555067dfe66e1d86e"} Dec 02 14:51:32 crc kubenswrapper[4902]: I1202 14:51:32.111167 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" podStartSLOduration=1.598111119 podStartE2EDuration="2.111138791s" podCreationTimestamp="2025-12-02 14:51:30 +0000 UTC" firstStartedPulling="2025-12-02 14:51:31.024377664 +0000 UTC m=+2122.215686373" lastFinishedPulling="2025-12-02 14:51:31.537405326 +0000 UTC m=+2122.728714045" observedRunningTime="2025-12-02 14:51:32.102843516 +0000 UTC m=+2123.294152255" watchObservedRunningTime="2025-12-02 14:51:32.111138791 +0000 UTC m=+2123.302447540" Dec 02 14:51:35 crc kubenswrapper[4902]: I1202 14:51:35.936942 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:35 crc kubenswrapper[4902]: I1202 14:51:35.999545 4902 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:36 crc kubenswrapper[4902]: I1202 14:51:36.180448 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.134262 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rzgk6" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="registry-server" containerID="cri-o://dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd" gracePeriod=2 Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.653772 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.760548 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkfgh\" (UniqueName: \"kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh\") pod \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.760697 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities\") pod \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.760759 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content\") pod \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\" (UID: \"1a8a6082-fbd2-43c0-acd3-b251c589ed0f\") " Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.761803 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities" (OuterVolumeSpecName: "utilities") pod "1a8a6082-fbd2-43c0-acd3-b251c589ed0f" (UID: "1a8a6082-fbd2-43c0-acd3-b251c589ed0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.767663 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh" (OuterVolumeSpecName: "kube-api-access-vkfgh") pod "1a8a6082-fbd2-43c0-acd3-b251c589ed0f" (UID: "1a8a6082-fbd2-43c0-acd3-b251c589ed0f"). InnerVolumeSpecName "kube-api-access-vkfgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.863708 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkfgh\" (UniqueName: \"kubernetes.io/projected/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-kube-api-access-vkfgh\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.863747 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.868732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a8a6082-fbd2-43c0-acd3-b251c589ed0f" (UID: "1a8a6082-fbd2-43c0-acd3-b251c589ed0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:51:37 crc kubenswrapper[4902]: I1202 14:51:37.967099 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8a6082-fbd2-43c0-acd3-b251c589ed0f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.152763 4902 generic.go:334] "Generic (PLEG): container finished" podID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerID="dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd" exitCode=0 Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.152830 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerDied","Data":"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd"} Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.152863 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgk6" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.152896 4902 scope.go:117] "RemoveContainer" containerID="dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.152879 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgk6" event={"ID":"1a8a6082-fbd2-43c0-acd3-b251c589ed0f","Type":"ContainerDied","Data":"77bb0d4b68d7ee23b4bcdbf7f757e736c30b155faf592de1e76cf52f8be72227"} Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.193524 4902 scope.go:117] "RemoveContainer" containerID="0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.194209 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.204255 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rzgk6"] Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.230446 4902 scope.go:117] "RemoveContainer" containerID="48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.263034 4902 scope.go:117] "RemoveContainer" containerID="dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd" Dec 02 14:51:38 crc kubenswrapper[4902]: E1202 14:51:38.263527 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd\": container with ID starting with dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd not found: ID does not exist" containerID="dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.263607 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd"} err="failed to get container status \"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd\": rpc error: code = NotFound desc = could not find container \"dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd\": container with ID starting with dd0494ac1c8678f51a4024d0c9f646e6205213c15d9789fc3e4b1085207762dd not found: ID does not exist" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.263643 4902 scope.go:117] "RemoveContainer" containerID="0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d" Dec 02 14:51:38 crc kubenswrapper[4902]: E1202 14:51:38.264086 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d\": container with ID starting with 0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d not found: ID does not exist" containerID="0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.264123 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d"} err="failed to get container status \"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d\": rpc error: code = NotFound desc = could not find container 
\"0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d\": container with ID starting with 0bde9ca3b8c30050d4a3b9b2ab8333c4b3509599dfe0f0e99c45b021ef82023d not found: ID does not exist" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.264152 4902 scope.go:117] "RemoveContainer" containerID="48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248" Dec 02 14:51:38 crc kubenswrapper[4902]: E1202 14:51:38.264478 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248\": container with ID starting with 48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248 not found: ID does not exist" containerID="48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248" Dec 02 14:51:38 crc kubenswrapper[4902]: I1202 14:51:38.264544 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248"} err="failed to get container status \"48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248\": rpc error: code = NotFound desc = could not find container \"48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248\": container with ID starting with 48e39fc782837745b5bfa499362f37c0c2998b8addd7928b3da7ee45bd705248 not found: ID does not exist" Dec 02 14:51:39 crc kubenswrapper[4902]: I1202 14:51:39.120455 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" path="/var/lib/kubelet/pods/1a8a6082-fbd2-43c0-acd3-b251c589ed0f/volumes" Dec 02 14:52:04 crc kubenswrapper[4902]: I1202 14:52:04.731790 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:52:04 crc kubenswrapper[4902]: I1202 14:52:04.732320 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:52:34 crc kubenswrapper[4902]: I1202 14:52:34.731285 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:52:34 crc kubenswrapper[4902]: I1202 14:52:34.732007 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:52:39 crc kubenswrapper[4902]: I1202 14:52:39.803842 4902 generic.go:334] "Generic (PLEG): container finished" podID="af830218-8303-46c8-a31a-a33fa89d0034" containerID="129c8ab493cce7c274090b1d4a1d0239e8794de0df6ba72555067dfe66e1d86e" exitCode=0 Dec 02 14:52:39 crc kubenswrapper[4902]: I1202 14:52:39.803931 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" event={"ID":"af830218-8303-46c8-a31a-a33fa89d0034","Type":"ContainerDied","Data":"129c8ab493cce7c274090b1d4a1d0239e8794de0df6ba72555067dfe66e1d86e"} Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.256442 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.325001 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle\") pod \"af830218-8303-46c8-a31a-a33fa89d0034\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.325082 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory\") pod \"af830218-8303-46c8-a31a-a33fa89d0034\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.325109 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key\") pod \"af830218-8303-46c8-a31a-a33fa89d0034\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.325128 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9rpw\" (UniqueName: \"kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw\") pod \"af830218-8303-46c8-a31a-a33fa89d0034\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.325203 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0\") pod \"af830218-8303-46c8-a31a-a33fa89d0034\" (UID: \"af830218-8303-46c8-a31a-a33fa89d0034\") " Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.335658 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw" (OuterVolumeSpecName: "kube-api-access-f9rpw") pod "af830218-8303-46c8-a31a-a33fa89d0034" (UID: "af830218-8303-46c8-a31a-a33fa89d0034"). InnerVolumeSpecName "kube-api-access-f9rpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.338734 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "af830218-8303-46c8-a31a-a33fa89d0034" (UID: "af830218-8303-46c8-a31a-a33fa89d0034"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.354750 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "af830218-8303-46c8-a31a-a33fa89d0034" (UID: "af830218-8303-46c8-a31a-a33fa89d0034"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.355298 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af830218-8303-46c8-a31a-a33fa89d0034" (UID: "af830218-8303-46c8-a31a-a33fa89d0034"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.356783 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory" (OuterVolumeSpecName: "inventory") pod "af830218-8303-46c8-a31a-a33fa89d0034" (UID: "af830218-8303-46c8-a31a-a33fa89d0034"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.427007 4902 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.427042 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.427051 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af830218-8303-46c8-a31a-a33fa89d0034-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.427058 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9rpw\" (UniqueName: \"kubernetes.io/projected/af830218-8303-46c8-a31a-a33fa89d0034-kube-api-access-f9rpw\") on node \"crc\" DevicePath \"\"" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.427067 4902 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/af830218-8303-46c8-a31a-a33fa89d0034-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.828874 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" event={"ID":"af830218-8303-46c8-a31a-a33fa89d0034","Type":"ContainerDied","Data":"74f060757f48cd7e90917462cb62c90b718df919b3f03f6f55975bcb9ed79222"} Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.829231 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74f060757f48cd7e90917462cb62c90b718df919b3f03f6f55975bcb9ed79222" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.828985 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qfls9" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.934733 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v"] Dec 02 14:52:41 crc kubenswrapper[4902]: E1202 14:52:41.935296 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="registry-server" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935319 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="registry-server" Dec 02 14:52:41 crc kubenswrapper[4902]: E1202 14:52:41.935353 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af830218-8303-46c8-a31a-a33fa89d0034" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935363 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="af830218-8303-46c8-a31a-a33fa89d0034" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 14:52:41 crc kubenswrapper[4902]: E1202 14:52:41.935373 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="extract-utilities" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935399 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="extract-utilities" Dec 02 14:52:41 crc kubenswrapper[4902]: E1202 14:52:41.935433 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="extract-content" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935442 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="extract-content" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935786 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="af830218-8303-46c8-a31a-a33fa89d0034" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.935829 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a8a6082-fbd2-43c0-acd3-b251c589ed0f" containerName="registry-server" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.936711 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.938747 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.939078 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.939232 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.939413 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.939785 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.939903 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:52:41 crc kubenswrapper[4902]: I1202 14:52:41.945435 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v"] Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037028 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037074 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037155 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v62kn\" (UniqueName: \"kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037195 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037296 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.037346 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.139748 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v62kn\" (UniqueName: \"kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.139853 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.139961 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.140071 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.140104 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.140140 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 
14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.144117 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.144390 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.145695 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.149668 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.156226 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.163625 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v62kn\" (UniqueName: \"kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.251469 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:52:42 crc kubenswrapper[4902]: I1202 14:52:42.840697 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v"] Dec 02 14:52:43 crc kubenswrapper[4902]: I1202 14:52:43.850218 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" event={"ID":"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47","Type":"ContainerStarted","Data":"d8644e52fd3ce71fbbf5713312d26288d2e64e39c85f5ea3e41fdeb42295f250"} Dec 02 14:52:43 crc kubenswrapper[4902]: I1202 14:52:43.850847 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" event={"ID":"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47","Type":"ContainerStarted","Data":"90092634c404c8390b4cd81f9085b678f9c6903071c5d69e41a3d4f9d65baac5"} Dec 02 14:52:43 crc kubenswrapper[4902]: I1202 14:52:43.875974 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" podStartSLOduration=2.288051922 podStartE2EDuration="2.875951779s" podCreationTimestamp="2025-12-02 14:52:41 +0000 UTC" firstStartedPulling="2025-12-02 14:52:42.849314997 +0000 UTC m=+2194.040623706" lastFinishedPulling="2025-12-02 14:52:43.437214854 +0000 UTC m=+2194.628523563" observedRunningTime="2025-12-02 14:52:43.869317941 +0000 UTC m=+2195.060626660" watchObservedRunningTime="2025-12-02 14:52:43.875951779 +0000 UTC m=+2195.067260488" Dec 02 14:53:02 crc kubenswrapper[4902]: I1202 14:53:02.931007 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"] Dec 02 14:53:02 crc kubenswrapper[4902]: I1202 14:53:02.939222 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:02 crc kubenswrapper[4902]: I1202 14:53:02.943594 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"] Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.030624 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.030711 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhd46\" (UniqueName: \"kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.030891 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.133095 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhd46\" (UniqueName: \"kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.133215 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.133300 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.133798 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.133795 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.157647 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zhd46\" (UniqueName: \"kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46\") pod \"redhat-marketplace-xsmnz\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") " pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.356434 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:03 crc kubenswrapper[4902]: W1202 14:53:03.933461 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7be49ee7_5b0d_44b7_84bb_0e23c542d516.slice/crio-9f96fd58c7dcb5dd6ae20eb696de2573c47267e7de36fa8b0fdae1ae6afd7d0a WatchSource:0}: Error finding container 9f96fd58c7dcb5dd6ae20eb696de2573c47267e7de36fa8b0fdae1ae6afd7d0a: Status 404 returned error can't find the container with id 9f96fd58c7dcb5dd6ae20eb696de2573c47267e7de36fa8b0fdae1ae6afd7d0a Dec 02 14:53:03 crc kubenswrapper[4902]: I1202 14:53:03.935014 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"] Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.053725 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerStarted","Data":"9f96fd58c7dcb5dd6ae20eb696de2573c47267e7de36fa8b0fdae1ae6afd7d0a"} Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.731531 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.731919 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.731975 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.732828 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 14:53:04 crc kubenswrapper[4902]: I1202 14:53:04.732916 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f" gracePeriod=600 Dec 02 14:53:05 crc kubenswrapper[4902]: I1202 14:53:05.064281 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f" exitCode=0 Dec 02 
Dec 02 14:53:05 crc kubenswrapper[4902]: I1202 14:53:05.064364 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f"}
Dec 02 14:53:05 crc kubenswrapper[4902]: I1202 14:53:05.064459 4902 scope.go:117] "RemoveContainer" containerID="ba9e508073e6997b268be3717357d2ef27b79bcc7fe4ab4e2c3a446cdba07c2f"
Dec 02 14:53:05 crc kubenswrapper[4902]: I1202 14:53:05.066176 4902 generic.go:334] "Generic (PLEG): container finished" podID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerID="2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730" exitCode=0
Dec 02 14:53:05 crc kubenswrapper[4902]: I1202 14:53:05.066219 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerDied","Data":"2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730"}
Dec 02 14:53:06 crc kubenswrapper[4902]: I1202 14:53:06.081868 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"}
Dec 02 14:53:07 crc kubenswrapper[4902]: I1202 14:53:07.094741 4902 generic.go:334] "Generic (PLEG): container finished" podID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerID="f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79" exitCode=0
Dec 02 14:53:07 crc kubenswrapper[4902]: I1202 14:53:07.094830 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerDied","Data":"f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79"}
Dec 02 14:53:09 crc kubenswrapper[4902]: I1202 14:53:09.124219 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerStarted","Data":"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d"}
Dec 02 14:53:09 crc kubenswrapper[4902]: I1202 14:53:09.141778 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xsmnz" podStartSLOduration=4.240058728 podStartE2EDuration="7.141757599s" podCreationTimestamp="2025-12-02 14:53:02 +0000 UTC" firstStartedPulling="2025-12-02 14:53:05.068417722 +0000 UTC m=+2216.259726421" lastFinishedPulling="2025-12-02 14:53:07.970116583 +0000 UTC m=+2219.161425292" observedRunningTime="2025-12-02 14:53:09.138860706 +0000 UTC m=+2220.330169415" watchObservedRunningTime="2025-12-02 14:53:09.141757599 +0000 UTC m=+2220.333066308"
Dec 02 14:53:13 crc kubenswrapper[4902]: I1202 14:53:13.357411 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Dec 02 14:53:13 crc kubenswrapper[4902]: I1202 14:53:13.359032 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Dec 02 14:53:13 crc kubenswrapper[4902]: I1202 14:53:13.418930 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Dec 02 14:53:14 crc kubenswrapper[4902]: I1202 14:53:14.228132 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Dec 02 14:53:14 crc kubenswrapper[4902]: I1202 14:53:14.321676 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"]
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.200002 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xsmnz" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="registry-server" containerID="cri-o://563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d" gracePeriod=2
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.712672 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.836084 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content\") pod \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") "
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.836127 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities\") pod \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") "
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.836159 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhd46\" (UniqueName: \"kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46\") pod \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\" (UID: \"7be49ee7-5b0d-44b7-84bb-0e23c542d516\") "
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.837192 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities" (OuterVolumeSpecName: "utilities") pod "7be49ee7-5b0d-44b7-84bb-0e23c542d516" (UID: "7be49ee7-5b0d-44b7-84bb-0e23c542d516"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.842586 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46" (OuterVolumeSpecName: "kube-api-access-zhd46") pod "7be49ee7-5b0d-44b7-84bb-0e23c542d516" (UID: "7be49ee7-5b0d-44b7-84bb-0e23c542d516"). InnerVolumeSpecName "kube-api-access-zhd46". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.855767 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7be49ee7-5b0d-44b7-84bb-0e23c542d516" (UID: "7be49ee7-5b0d-44b7-84bb-0e23c542d516"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.937880 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.937916 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7be49ee7-5b0d-44b7-84bb-0e23c542d516-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 14:53:16 crc kubenswrapper[4902]: I1202 14:53:16.937927 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhd46\" (UniqueName: \"kubernetes.io/projected/7be49ee7-5b0d-44b7-84bb-0e23c542d516-kube-api-access-zhd46\") on node \"crc\" DevicePath \"\""
Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.211501 4902 generic.go:334] "Generic (PLEG): container finished" podID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerID="563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d" exitCode=0
Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.211550 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerDied","Data":"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d"}
Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.211617 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xsmnz" event={"ID":"7be49ee7-5b0d-44b7-84bb-0e23c542d516","Type":"ContainerDied","Data":"9f96fd58c7dcb5dd6ae20eb696de2573c47267e7de36fa8b0fdae1ae6afd7d0a"}
Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.211637 4902 scope.go:117] "RemoveContainer" containerID="563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d"
Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.211636 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xsmnz"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xsmnz" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.239386 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"] Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.240522 4902 scope.go:117] "RemoveContainer" containerID="f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.259395 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xsmnz"] Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.261093 4902 scope.go:117] "RemoveContainer" containerID="2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.319901 4902 scope.go:117] "RemoveContainer" containerID="563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d" Dec 02 14:53:17 crc kubenswrapper[4902]: E1202 14:53:17.320297 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d\": container with ID starting with 563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d not found: ID does not exist" containerID="563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.320344 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d"} err="failed to get container status \"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d\": rpc error: code = NotFound desc = could not find container \"563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d\": container with ID starting with 563f636769191ea5878b7fa749bf689d9f84a6b61deb7ee238dbf3991581c94d not found: ID does not exist" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.320399 4902 scope.go:117] "RemoveContainer" containerID="f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79" Dec 02 14:53:17 crc kubenswrapper[4902]: E1202 14:53:17.320856 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79\": container with ID starting with f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79 not found: ID does not exist" containerID="f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.320886 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79"} err="failed to get container status \"f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79\": rpc error: code = NotFound desc = could not find container \"f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79\": container with ID starting with f23cf285648d216a9b9c4312bd8634d02620216b57725290e2b22e9126c14e79 not found: ID does not exist" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.320905 4902 scope.go:117] "RemoveContainer" containerID="2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730" Dec 02 14:53:17 crc kubenswrapper[4902]: E1202 14:53:17.321162 4902 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730\": container with ID starting with 2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730 not found: ID does not exist" containerID="2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730" Dec 02 14:53:17 crc kubenswrapper[4902]: I1202 14:53:17.321203 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730"} err="failed to get container status \"2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730\": rpc error: code = NotFound desc = could not find container \"2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730\": container with ID starting with 2d28ae8e011c5c90c3d41a15038f7d99b9320b888abd7af08d040a50ad363730 not found: ID does not exist" Dec 02 14:53:19 crc kubenswrapper[4902]: I1202 14:53:19.126182 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" path="/var/lib/kubelet/pods/7be49ee7-5b0d-44b7-84bb-0e23c542d516/volumes" Dec 02 14:53:35 crc kubenswrapper[4902]: I1202 14:53:35.488042 4902 generic.go:334] "Generic (PLEG): container finished" podID="6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" containerID="d8644e52fd3ce71fbbf5713312d26288d2e64e39c85f5ea3e41fdeb42295f250" exitCode=0 Dec 02 14:53:35 crc kubenswrapper[4902]: I1202 14:53:35.488256 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" event={"ID":"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47","Type":"ContainerDied","Data":"d8644e52fd3ce71fbbf5713312d26288d2e64e39c85f5ea3e41fdeb42295f250"} Dec 02 14:53:36 crc kubenswrapper[4902]: I1202 14:53:36.904952 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.067532 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.067700 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.067766 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.067804 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v62kn\" (UniqueName: \"kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.067869 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.068016 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0\") pod \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\" (UID: \"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47\") " Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.073305 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.089831 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn" (OuterVolumeSpecName: "kube-api-access-v62kn") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "kube-api-access-v62kn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.097218 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory" (OuterVolumeSpecName: "inventory") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.104718 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.115830 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.117907 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" (UID: "6b639ec2-25b0-47b7-8e8e-4ff7fa466e47"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.171591 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.172916 4902 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.173105 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v62kn\" (UniqueName: \"kubernetes.io/projected/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-kube-api-access-v62kn\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.173557 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.173895 4902 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.174225 4902 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6b639ec2-25b0-47b7-8e8e-4ff7fa466e47-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.510799 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" event={"ID":"6b639ec2-25b0-47b7-8e8e-4ff7fa466e47","Type":"ContainerDied","Data":"90092634c404c8390b4cd81f9085b678f9c6903071c5d69e41a3d4f9d65baac5"} Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.510870 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90092634c404c8390b4cd81f9085b678f9c6903071c5d69e41a3d4f9d65baac5" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.510912 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.689885 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl"] Dec 02 14:53:37 crc kubenswrapper[4902]: E1202 14:53:37.690704 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.690727 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 14:53:37 crc kubenswrapper[4902]: E1202 14:53:37.690748 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="registry-server" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.690756 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="registry-server" Dec 02 14:53:37 crc kubenswrapper[4902]: E1202 14:53:37.690780 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="extract-content" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.690790 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="extract-content" Dec 02 14:53:37 crc kubenswrapper[4902]: E1202 14:53:37.690809 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="extract-utilities" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.690819 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="extract-utilities" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.691062 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b639ec2-25b0-47b7-8e8e-4ff7fa466e47" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.691098 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="7be49ee7-5b0d-44b7-84bb-0e23c542d516" containerName="registry-server" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.691896 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.695793 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.700904 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.701352 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.702039 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.703132 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.734681 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl"] Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.789011 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.789406 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.789526 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.789633 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.789670 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc99r\" (UniqueName: \"kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.892165 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.892296 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.892329 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.892356 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc99r\" (UniqueName: \"kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.892377 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.896131 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.896833 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.897251 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.903237 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:37 crc kubenswrapper[4902]: I1202 14:53:37.918025 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc99r\" (UniqueName: \"kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:38 crc kubenswrapper[4902]: I1202 14:53:38.022415 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:53:38 crc kubenswrapper[4902]: I1202 14:53:38.613533 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl"] Dec 02 14:53:39 crc kubenswrapper[4902]: I1202 14:53:39.535159 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" event={"ID":"197fff36-7bd5-46a0-a0e1-5b986b4cfc61","Type":"ContainerStarted","Data":"5a053dc5a3e6f106047e2aa11da67aaf100fe03c5c2ff523684c0efbf64221aa"} Dec 02 14:53:40 crc kubenswrapper[4902]: I1202 14:53:40.545879 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" event={"ID":"197fff36-7bd5-46a0-a0e1-5b986b4cfc61","Type":"ContainerStarted","Data":"9ec9c8c2da73cf77a42dac580d2addf3a9311f802808954f3311009131b1ca45"} Dec 02 14:53:40 crc kubenswrapper[4902]: I1202 14:53:40.573669 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" podStartSLOduration=2.3419988910000002 podStartE2EDuration="3.573644068s" podCreationTimestamp="2025-12-02 14:53:37 +0000 UTC" firstStartedPulling="2025-12-02 14:53:38.616343766 +0000 UTC m=+2249.807652475" lastFinishedPulling="2025-12-02 14:53:39.847988943 +0000 UTC m=+2251.039297652" observedRunningTime="2025-12-02 14:53:40.561314418 +0000 UTC m=+2251.752623147" watchObservedRunningTime="2025-12-02 14:53:40.573644068 +0000 UTC m=+2251.764952817" Dec 02 14:55:34 crc kubenswrapper[4902]: I1202 14:55:34.732359 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:55:34 crc kubenswrapper[4902]: I1202 14:55:34.733047 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.402129 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.406276 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.418478 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.575911 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.576272 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9lpd\" (UniqueName: \"kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.576354 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.678304 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.678406 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.678480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9lpd\" (UniqueName: \"kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.678924 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.678924 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.705185 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-b9lpd\" (UniqueName: \"kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd\") pod \"certified-operators-4h7k5\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:53 crc kubenswrapper[4902]: I1202 14:55:53.752449 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:55:54 crc kubenswrapper[4902]: I1202 14:55:54.286308 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:55:54 crc kubenswrapper[4902]: I1202 14:55:54.930034 4902 generic.go:334] "Generic (PLEG): container finished" podID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerID="ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7" exitCode=0 Dec 02 14:55:54 crc kubenswrapper[4902]: I1202 14:55:54.930088 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerDied","Data":"ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7"} Dec 02 14:55:54 crc kubenswrapper[4902]: I1202 14:55:54.930456 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerStarted","Data":"d8b0efb2e4c8cb64c8a394178dae40bca84f6b15a7640056a5d8ae860dcbe588"} Dec 02 14:55:54 crc kubenswrapper[4902]: I1202 14:55:54.932405 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.795483 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mplqj"] Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.798039 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.811714 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mplqj"] Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.924257 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk4fq\" (UniqueName: \"kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.924388 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.924459 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:55 crc kubenswrapper[4902]: I1202 14:55:55.942242 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerStarted","Data":"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324"} Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.025987 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk4fq\" (UniqueName: \"kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.026439 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.026683 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.027421 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.027995 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.051387 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk4fq\" (UniqueName: \"kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq\") pod \"community-operators-mplqj\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") " pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.125306 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.763142 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mplqj"] Dec 02 14:55:56 crc kubenswrapper[4902]: W1202 14:55:56.767713 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod488a9e8f_d229_4a78_ac56_dee271f98795.slice/crio-3b9905818bc9979a99a26f2810db361cc7b4972b797edf78954075512e68c29d WatchSource:0}: Error finding container 3b9905818bc9979a99a26f2810db361cc7b4972b797edf78954075512e68c29d: Status 404 returned error can't find the container with id 3b9905818bc9979a99a26f2810db361cc7b4972b797edf78954075512e68c29d Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.953556 4902 generic.go:334] "Generic (PLEG): container finished" podID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerID="11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324" exitCode=0 Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.953610 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerDied","Data":"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324"} Dec 02 14:55:56 crc kubenswrapper[4902]: I1202 14:55:56.955975 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerStarted","Data":"3b9905818bc9979a99a26f2810db361cc7b4972b797edf78954075512e68c29d"} Dec 02 14:55:57 crc kubenswrapper[4902]: I1202 14:55:57.971330 4902 generic.go:334] "Generic (PLEG): container finished" podID="488a9e8f-d229-4a78-ac56-dee271f98795" containerID="47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12" exitCode=0 Dec 02 14:55:57 crc kubenswrapper[4902]: I1202 14:55:57.971917 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerDied","Data":"47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12"} Dec 02 14:55:58 crc kubenswrapper[4902]: I1202 14:55:58.982677 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerStarted","Data":"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3"} Dec 02 14:55:59 crc kubenswrapper[4902]: I1202 14:55:59.004285 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4h7k5" 
podStartSLOduration=2.430322045 podStartE2EDuration="6.004265733s" podCreationTimestamp="2025-12-02 14:55:53 +0000 UTC" firstStartedPulling="2025-12-02 14:55:54.93207942 +0000 UTC m=+2386.123388129" lastFinishedPulling="2025-12-02 14:55:58.506023088 +0000 UTC m=+2389.697331817" observedRunningTime="2025-12-02 14:55:59.00167683 +0000 UTC m=+2390.192985549" watchObservedRunningTime="2025-12-02 14:55:59.004265733 +0000 UTC m=+2390.195574442" Dec 02 14:55:59 crc kubenswrapper[4902]: I1202 14:55:59.991480 4902 generic.go:334] "Generic (PLEG): container finished" podID="488a9e8f-d229-4a78-ac56-dee271f98795" containerID="51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b" exitCode=0 Dec 02 14:55:59 crc kubenswrapper[4902]: I1202 14:55:59.991588 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerDied","Data":"51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b"} Dec 02 14:56:01 crc kubenswrapper[4902]: I1202 14:56:01.013335 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerStarted","Data":"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a"} Dec 02 14:56:01 crc kubenswrapper[4902]: I1202 14:56:01.035837 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mplqj" podStartSLOduration=3.503639331 podStartE2EDuration="6.035821347s" podCreationTimestamp="2025-12-02 14:55:55 +0000 UTC" firstStartedPulling="2025-12-02 14:55:57.985999346 +0000 UTC m=+2389.177308075" lastFinishedPulling="2025-12-02 14:56:00.518181382 +0000 UTC m=+2391.709490091" observedRunningTime="2025-12-02 14:56:01.030848066 +0000 UTC m=+2392.222156775" watchObservedRunningTime="2025-12-02 14:56:01.035821347 +0000 UTC m=+2392.227130056" Dec 02 14:56:03 crc kubenswrapper[4902]: I1202 14:56:03.753509 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:03 crc kubenswrapper[4902]: I1202 14:56:03.753877 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:03 crc kubenswrapper[4902]: I1202 14:56:03.807991 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:04 crc kubenswrapper[4902]: I1202 14:56:04.118802 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:04 crc kubenswrapper[4902]: I1202 14:56:04.589046 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:56:04 crc kubenswrapper[4902]: I1202 14:56:04.732000 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 14:56:04 crc kubenswrapper[4902]: I1202 14:56:04.732084 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.058321 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4h7k5" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="registry-server" containerID="cri-o://b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3" gracePeriod=2 Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.126329 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.128405 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.202920 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.510107 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.665666 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9lpd\" (UniqueName: \"kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd\") pod \"ce6d0e26-a411-4c0b-ad53-adc052961951\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.665787 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content\") pod \"ce6d0e26-a411-4c0b-ad53-adc052961951\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.665904 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities\") pod \"ce6d0e26-a411-4c0b-ad53-adc052961951\" (UID: \"ce6d0e26-a411-4c0b-ad53-adc052961951\") " Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.668436 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities" (OuterVolumeSpecName: "utilities") pod "ce6d0e26-a411-4c0b-ad53-adc052961951" (UID: "ce6d0e26-a411-4c0b-ad53-adc052961951"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.673596 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd" (OuterVolumeSpecName: "kube-api-access-b9lpd") pod "ce6d0e26-a411-4c0b-ad53-adc052961951" (UID: "ce6d0e26-a411-4c0b-ad53-adc052961951"). InnerVolumeSpecName "kube-api-access-b9lpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.711872 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce6d0e26-a411-4c0b-ad53-adc052961951" (UID: "ce6d0e26-a411-4c0b-ad53-adc052961951"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.769445 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9lpd\" (UniqueName: \"kubernetes.io/projected/ce6d0e26-a411-4c0b-ad53-adc052961951-kube-api-access-b9lpd\") on node \"crc\" DevicePath \"\"" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.769482 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 14:56:06 crc kubenswrapper[4902]: I1202 14:56:06.769492 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce6d0e26-a411-4c0b-ad53-adc052961951-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.072855 4902 generic.go:334] "Generic (PLEG): container finished" podID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerID="b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3" exitCode=0 Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.072945 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerDied","Data":"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3"} Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.073162 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4h7k5" event={"ID":"ce6d0e26-a411-4c0b-ad53-adc052961951","Type":"ContainerDied","Data":"d8b0efb2e4c8cb64c8a394178dae40bca84f6b15a7640056a5d8ae860dcbe588"} Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.073199 4902 scope.go:117] "RemoveContainer" containerID="b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.072996 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4h7k5" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.107329 4902 scope.go:117] "RemoveContainer" containerID="11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.139727 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.141865 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4h7k5"] Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.147344 4902 scope.go:117] "RemoveContainer" containerID="ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.155145 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.197298 4902 scope.go:117] "RemoveContainer" containerID="b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3" Dec 02 14:56:07 crc kubenswrapper[4902]: E1202 14:56:07.197823 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3\": container with ID starting with b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3 not found: ID does not exist" containerID="b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.197874 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3"} err="failed to get container status \"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3\": rpc error: code = NotFound desc = could not find container \"b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3\": container with ID starting with b6f05202cc6c9ced0edb3594211d013edaf29bed7c440a3a4aa2a6406626ceb3 not found: ID does not exist" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.197904 4902 scope.go:117] "RemoveContainer" containerID="11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324" Dec 02 14:56:07 crc kubenswrapper[4902]: E1202 14:56:07.198341 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324\": container with ID starting with 11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324 not found: ID does not exist" containerID="11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.198378 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324"} err="failed to get container status \"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324\": rpc error: code = NotFound desc = could not find container \"11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324\": container with ID starting with 11713bca69590c2b9bc226a47bd5947e9d82ad5c42df98f5910040ca2f5f7324 not found: ID does not exist" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.198405 4902 scope.go:117] "RemoveContainer" 
containerID="ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7" Dec 02 14:56:07 crc kubenswrapper[4902]: E1202 14:56:07.198799 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7\": container with ID starting with ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7 not found: ID does not exist" containerID="ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7" Dec 02 14:56:07 crc kubenswrapper[4902]: I1202 14:56:07.198823 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7"} err="failed to get container status \"ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7\": rpc error: code = NotFound desc = could not find container \"ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7\": container with ID starting with ca9a0ed880df01927f39bed775d7a4fc86375b22f9adb4a2c0b18c0acc56daa7 not found: ID does not exist" Dec 02 14:56:08 crc kubenswrapper[4902]: I1202 14:56:08.588407 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mplqj"] Dec 02 14:56:09 crc kubenswrapper[4902]: I1202 14:56:09.126674 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" path="/var/lib/kubelet/pods/ce6d0e26-a411-4c0b-ad53-adc052961951/volumes" Dec 02 14:56:10 crc kubenswrapper[4902]: I1202 14:56:10.121334 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mplqj" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="registry-server" containerID="cri-o://17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a" gracePeriod=2 Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.100786 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.137310 4902 generic.go:334] "Generic (PLEG): container finished" podID="488a9e8f-d229-4a78-ac56-dee271f98795" containerID="17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a" exitCode=0 Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.137373 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerDied","Data":"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a"} Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.137418 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mplqj" event={"ID":"488a9e8f-d229-4a78-ac56-dee271f98795","Type":"ContainerDied","Data":"3b9905818bc9979a99a26f2810db361cc7b4972b797edf78954075512e68c29d"} Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.137451 4902 scope.go:117] "RemoveContainer" containerID="17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.138234 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mplqj" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.173632 4902 scope.go:117] "RemoveContainer" containerID="51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.206468 4902 scope.go:117] "RemoveContainer" containerID="47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.249933 4902 scope.go:117] "RemoveContainer" containerID="17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a" Dec 02 14:56:11 crc kubenswrapper[4902]: E1202 14:56:11.250617 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a\": container with ID starting with 17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a not found: ID does not exist" containerID="17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.250674 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a"} err="failed to get container status \"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a\": rpc error: code = NotFound desc = could not find container \"17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a\": container with ID starting with 17cf687a964b32ee4ef6d0a8f186f3a23567538cf533beb23b3aa5213be05c9a not found: ID does not exist" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.250706 4902 scope.go:117] "RemoveContainer" containerID="51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b" Dec 02 14:56:11 crc kubenswrapper[4902]: E1202 14:56:11.251166 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b\": container with ID starting with 51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b not found: ID does not exist" containerID="51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.251201 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b"} err="failed to get container status \"51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b\": rpc error: code = NotFound desc = could not find container \"51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b\": container with ID starting with 51a20d0aa4d46bf4ce080c1cc0a6f5e2b91cef1174e58d2410bb16b6b0c5b24b not found: ID does not exist" Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.251228 4902 scope.go:117] "RemoveContainer" containerID="47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12" Dec 02 14:56:11 crc kubenswrapper[4902]: E1202 14:56:11.251522 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12\": container with ID starting with 47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12 not found: ID does not exist" containerID="47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12" 
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.251543 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12"} err="failed to get container status \"47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12\": rpc error: code = NotFound desc = could not find container \"47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12\": container with ID starting with 47828f767d97051b8fff9407c525381e9d226be6d29dda03b07a451ead303f12 not found: ID does not exist"
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.265527 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities\") pod \"488a9e8f-d229-4a78-ac56-dee271f98795\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") "
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.265858 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content\") pod \"488a9e8f-d229-4a78-ac56-dee271f98795\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") "
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.265933 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk4fq\" (UniqueName: \"kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq\") pod \"488a9e8f-d229-4a78-ac56-dee271f98795\" (UID: \"488a9e8f-d229-4a78-ac56-dee271f98795\") "
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.266708 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities" (OuterVolumeSpecName: "utilities") pod "488a9e8f-d229-4a78-ac56-dee271f98795" (UID: "488a9e8f-d229-4a78-ac56-dee271f98795"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.276717 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq" (OuterVolumeSpecName: "kube-api-access-sk4fq") pod "488a9e8f-d229-4a78-ac56-dee271f98795" (UID: "488a9e8f-d229-4a78-ac56-dee271f98795"). InnerVolumeSpecName "kube-api-access-sk4fq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.350348 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "488a9e8f-d229-4a78-ac56-dee271f98795" (UID: "488a9e8f-d229-4a78-ac56-dee271f98795"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.368806 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.368841 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/488a9e8f-d229-4a78-ac56-dee271f98795-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.368857 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk4fq\" (UniqueName: \"kubernetes.io/projected/488a9e8f-d229-4a78-ac56-dee271f98795-kube-api-access-sk4fq\") on node \"crc\" DevicePath \"\""
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.473155 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mplqj"]
Dec 02 14:56:11 crc kubenswrapper[4902]: I1202 14:56:11.487623 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mplqj"]
Dec 02 14:56:13 crc kubenswrapper[4902]: I1202 14:56:13.118465 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" path="/var/lib/kubelet/pods/488a9e8f-d229-4a78-ac56-dee271f98795/volumes"
Dec 02 14:56:34 crc kubenswrapper[4902]: I1202 14:56:34.731520 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 14:56:34 crc kubenswrapper[4902]: I1202 14:56:34.732099 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 14:56:34 crc kubenswrapper[4902]: I1202 14:56:34.732141 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 14:56:34 crc kubenswrapper[4902]: I1202 14:56:34.732922 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 14:56:34 crc kubenswrapper[4902]: I1202 14:56:34.732979 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" gracePeriod=600
Dec 02 14:56:34 crc kubenswrapper[4902]: E1202 14:56:34.874757 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:56:35 crc kubenswrapper[4902]: I1202 14:56:35.395475 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" exitCode=0
Dec 02 14:56:35 crc kubenswrapper[4902]: I1202 14:56:35.395544 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"}
Dec 02 14:56:35 crc kubenswrapper[4902]: I1202 14:56:35.395649 4902 scope.go:117] "RemoveContainer" containerID="d7fd0c2463f17287d481fe5e84abc83416fa52dc118f1290102b095ff55ebf6f"
Dec 02 14:56:35 crc kubenswrapper[4902]: I1202 14:56:35.396776 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:56:35 crc kubenswrapper[4902]: E1202 14:56:35.397368 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:56:46 crc kubenswrapper[4902]: I1202 14:56:46.107143 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:56:46 crc kubenswrapper[4902]: E1202 14:56:46.108099 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:56:57 crc kubenswrapper[4902]: I1202 14:56:57.108164 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:56:57 crc kubenswrapper[4902]: E1202 14:56:57.109380 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:57:12 crc kubenswrapper[4902]: I1202 14:57:12.107679 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:57:12 crc kubenswrapper[4902]: E1202 14:57:12.108552 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:57:26 crc kubenswrapper[4902]: I1202 14:57:26.108589 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:57:26 crc kubenswrapper[4902]: E1202 14:57:26.109480 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:57:41 crc kubenswrapper[4902]: I1202 14:57:41.107189 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:57:41 crc kubenswrapper[4902]: E1202 14:57:41.109945 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:57:56 crc kubenswrapper[4902]: I1202 14:57:56.107468 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:57:56 crc kubenswrapper[4902]: E1202 14:57:56.108971 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:58:06 crc kubenswrapper[4902]: I1202 14:58:06.406280 4902 generic.go:334] "Generic (PLEG): container finished" podID="197fff36-7bd5-46a0-a0e1-5b986b4cfc61" containerID="9ec9c8c2da73cf77a42dac580d2addf3a9311f802808954f3311009131b1ca45" exitCode=0
Dec 02 14:58:06 crc kubenswrapper[4902]: I1202 14:58:06.406386 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" event={"ID":"197fff36-7bd5-46a0-a0e1-5b986b4cfc61","Type":"ContainerDied","Data":"9ec9c8c2da73cf77a42dac580d2addf3a9311f802808954f3311009131b1ca45"}
Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.107163 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 14:58:07 crc kubenswrapper[4902]: E1202 14:58:07.108096 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 14:58:07 crc kubenswrapper[4902]: I1202
14:58:07.821445 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.961605 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0\") pod \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.961830 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc99r\" (UniqueName: \"kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r\") pod \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.961949 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory\") pod \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.962016 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key\") pod \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.962266 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle\") pod \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\" (UID: \"197fff36-7bd5-46a0-a0e1-5b986b4cfc61\") " Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.968253 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "197fff36-7bd5-46a0-a0e1-5b986b4cfc61" (UID: "197fff36-7bd5-46a0-a0e1-5b986b4cfc61"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.973770 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r" (OuterVolumeSpecName: "kube-api-access-hc99r") pod "197fff36-7bd5-46a0-a0e1-5b986b4cfc61" (UID: "197fff36-7bd5-46a0-a0e1-5b986b4cfc61"). InnerVolumeSpecName "kube-api-access-hc99r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.990704 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory" (OuterVolumeSpecName: "inventory") pod "197fff36-7bd5-46a0-a0e1-5b986b4cfc61" (UID: "197fff36-7bd5-46a0-a0e1-5b986b4cfc61"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.993061 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "197fff36-7bd5-46a0-a0e1-5b986b4cfc61" (UID: "197fff36-7bd5-46a0-a0e1-5b986b4cfc61"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:58:07 crc kubenswrapper[4902]: I1202 14:58:07.995266 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "197fff36-7bd5-46a0-a0e1-5b986b4cfc61" (UID: "197fff36-7bd5-46a0-a0e1-5b986b4cfc61"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.064886 4902 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.065039 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc99r\" (UniqueName: \"kubernetes.io/projected/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-kube-api-access-hc99r\") on node \"crc\" DevicePath \"\"" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.065099 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.065157 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.065215 4902 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/197fff36-7bd5-46a0-a0e1-5b986b4cfc61-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.436417 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" event={"ID":"197fff36-7bd5-46a0-a0e1-5b986b4cfc61","Type":"ContainerDied","Data":"5a053dc5a3e6f106047e2aa11da67aaf100fe03c5c2ff523684c0efbf64221aa"} Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.436505 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a053dc5a3e6f106047e2aa11da67aaf100fe03c5c2ff523684c0efbf64221aa" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.436535 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557024 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms"] Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557503 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557529 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557550 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="197fff36-7bd5-46a0-a0e1-5b986b4cfc61" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557576 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="197fff36-7bd5-46a0-a0e1-5b986b4cfc61" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557599 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="extract-content" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557607 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="extract-content" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557634 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="extract-utilities" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557643 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="extract-utilities" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557664 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557673 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557707 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="extract-utilities" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557715 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="extract-utilities" Dec 02 14:58:08 crc kubenswrapper[4902]: E1202 14:58:08.557725 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="extract-content" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557735 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="extract-content" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.557990 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="488a9e8f-d229-4a78-ac56-dee271f98795" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.558012 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="197fff36-7bd5-46a0-a0e1-5b986b4cfc61" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 
14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.558033 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6d0e26-a411-4c0b-ad53-adc052961951" containerName="registry-server" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.558960 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.565502 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.565781 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.565953 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.566133 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.566288 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.566462 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.567093 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.576379 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms"] Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.693993 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694036 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694065 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694108 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4mzn\" (UniqueName: \"kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694133 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694181 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694252 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694308 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.694340 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796216 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796258 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796289 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: 
\"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796337 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4mzn\" (UniqueName: \"kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796371 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796424 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796451 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796498 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.796525 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.797465 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.802444 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.802682 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.803924 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.805085 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.805262 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.805277 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.809498 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.818846 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4mzn\" (UniqueName: \"kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-vmxms\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:08 crc kubenswrapper[4902]: I1202 14:58:08.893586 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" Dec 02 14:58:09 crc kubenswrapper[4902]: I1202 14:58:09.496069 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms"] Dec 02 14:58:10 crc kubenswrapper[4902]: I1202 14:58:10.179213 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 14:58:10 crc kubenswrapper[4902]: I1202 14:58:10.464822 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" event={"ID":"61892f44-715d-453e-83fd-b62cd886d24e","Type":"ContainerStarted","Data":"12b07b51c8d4935743dd33f2ab1a6e06617b98a08532f1cfa045c6b248b79841"} Dec 02 14:58:11 crc kubenswrapper[4902]: I1202 14:58:11.478386 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" event={"ID":"61892f44-715d-453e-83fd-b62cd886d24e","Type":"ContainerStarted","Data":"e839b0f15a3a69e79cca13ce24d9f197bffb5fe33e1a5e8798c8bd73fc8f84ff"} Dec 02 14:58:11 crc kubenswrapper[4902]: I1202 14:58:11.497413 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" podStartSLOduration=2.824671501 podStartE2EDuration="3.497384934s" podCreationTimestamp="2025-12-02 14:58:08 +0000 UTC" firstStartedPulling="2025-12-02 14:58:09.503299715 +0000 UTC m=+2520.694608434" lastFinishedPulling="2025-12-02 14:58:10.176013138 +0000 UTC m=+2521.367321867" observedRunningTime="2025-12-02 14:58:11.492322871 +0000 UTC m=+2522.683631620" watchObservedRunningTime="2025-12-02 14:58:11.497384934 +0000 UTC m=+2522.688693643" Dec 02 14:58:18 crc kubenswrapper[4902]: I1202 14:58:18.106834 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:58:18 crc kubenswrapper[4902]: E1202 14:58:18.107702 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:58:30 crc kubenswrapper[4902]: I1202 14:58:30.108370 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:58:30 crc kubenswrapper[4902]: E1202 14:58:30.109101 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:58:44 crc kubenswrapper[4902]: I1202 14:58:44.107006 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:58:44 crc kubenswrapper[4902]: E1202 14:58:44.107709 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:58:57 crc kubenswrapper[4902]: I1202 14:58:57.106508 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:58:57 crc kubenswrapper[4902]: E1202 14:58:57.107218 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:59:09 crc kubenswrapper[4902]: I1202 14:59:09.106631 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:59:09 crc kubenswrapper[4902]: E1202 14:59:09.135779 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:59:22 crc kubenswrapper[4902]: I1202 14:59:22.106418 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:59:22 crc kubenswrapper[4902]: E1202 14:59:22.107229 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:59:37 crc kubenswrapper[4902]: I1202 14:59:37.107600 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:59:37 crc kubenswrapper[4902]: E1202 14:59:37.108887 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 14:59:50 crc kubenswrapper[4902]: I1202 14:59:50.107883 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 14:59:50 crc kubenswrapper[4902]: E1202 14:59:50.109441 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" 
podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.165087 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb"] Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.166866 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.171947 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.172158 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.192852 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb"] Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.339931 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ngzs\" (UniqueName: \"kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.340090 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.340233 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.442404 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ngzs\" (UniqueName: \"kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.442476 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.442554 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: 
\"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.443845 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.453722 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.470697 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ngzs\" (UniqueName: \"kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs\") pod \"collect-profiles-29411460-2r9xb\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.489062 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:00 crc kubenswrapper[4902]: I1202 15:00:00.965095 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb"] Dec 02 15:00:01 crc kubenswrapper[4902]: I1202 15:00:01.648889 4902 generic.go:334] "Generic (PLEG): container finished" podID="f92b5dcc-2f9b-440a-8c19-adea872f9df9" containerID="4ae679602af91e76f65d50416b442f1ab633cd92983ad6947a5890d2e334db72" exitCode=0 Dec 02 15:00:01 crc kubenswrapper[4902]: I1202 15:00:01.649190 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" event={"ID":"f92b5dcc-2f9b-440a-8c19-adea872f9df9","Type":"ContainerDied","Data":"4ae679602af91e76f65d50416b442f1ab633cd92983ad6947a5890d2e334db72"} Dec 02 15:00:01 crc kubenswrapper[4902]: I1202 15:00:01.650794 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" event={"ID":"f92b5dcc-2f9b-440a-8c19-adea872f9df9","Type":"ContainerStarted","Data":"9ad884cf8522ced82ca281c5799781bdf345392868d042a908b3495bc4a64845"} Dec 02 15:00:02 crc kubenswrapper[4902]: I1202 15:00:02.106757 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:00:02 crc kubenswrapper[4902]: E1202 15:00:02.107270 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.004286 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.198893 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ngzs\" (UniqueName: \"kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs\") pod \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.198975 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume\") pod \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.199001 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume\") pod \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\" (UID: \"f92b5dcc-2f9b-440a-8c19-adea872f9df9\") " Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.199398 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume" (OuterVolumeSpecName: "config-volume") pod "f92b5dcc-2f9b-440a-8c19-adea872f9df9" (UID: "f92b5dcc-2f9b-440a-8c19-adea872f9df9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.199479 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f92b5dcc-2f9b-440a-8c19-adea872f9df9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.213309 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f92b5dcc-2f9b-440a-8c19-adea872f9df9" (UID: "f92b5dcc-2f9b-440a-8c19-adea872f9df9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.213402 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs" (OuterVolumeSpecName: "kube-api-access-8ngzs") pod "f92b5dcc-2f9b-440a-8c19-adea872f9df9" (UID: "f92b5dcc-2f9b-440a-8c19-adea872f9df9"). InnerVolumeSpecName "kube-api-access-8ngzs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.300639 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ngzs\" (UniqueName: \"kubernetes.io/projected/f92b5dcc-2f9b-440a-8c19-adea872f9df9-kube-api-access-8ngzs\") on node \"crc\" DevicePath \"\"" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.300862 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f92b5dcc-2f9b-440a-8c19-adea872f9df9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.670939 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" event={"ID":"f92b5dcc-2f9b-440a-8c19-adea872f9df9","Type":"ContainerDied","Data":"9ad884cf8522ced82ca281c5799781bdf345392868d042a908b3495bc4a64845"} Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.670985 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ad884cf8522ced82ca281c5799781bdf345392868d042a908b3495bc4a64845" Dec 02 15:00:03 crc kubenswrapper[4902]: I1202 15:00:03.671031 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb" Dec 02 15:00:04 crc kubenswrapper[4902]: I1202 15:00:04.081966 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"] Dec 02 15:00:04 crc kubenswrapper[4902]: I1202 15:00:04.090824 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411415-rc7hx"] Dec 02 15:00:05 crc kubenswrapper[4902]: I1202 15:00:05.131680 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5059510-c9f9-4945-8964-b74b62a6352d" path="/var/lib/kubelet/pods/c5059510-c9f9-4945-8964-b74b62a6352d/volumes" Dec 02 15:00:14 crc kubenswrapper[4902]: I1202 15:00:14.108432 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:00:14 crc kubenswrapper[4902]: E1202 15:00:14.109647 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:00:27 crc kubenswrapper[4902]: I1202 15:00:27.106764 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:00:27 crc kubenswrapper[4902]: E1202 15:00:27.107528 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:00:32 crc kubenswrapper[4902]: I1202 15:00:32.867521 4902 scope.go:117] "RemoveContainer" containerID="aba92fd5c955a2cab29276e67ea475ddd56913795cfb342dc1f00a414d5af05d" Dec 02 
15:00:38 crc kubenswrapper[4902]: I1202 15:00:38.107110 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:00:38 crc kubenswrapper[4902]: E1202 15:00:38.107924 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:00:53 crc kubenswrapper[4902]: I1202 15:00:53.106668 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:00:53 crc kubenswrapper[4902]: E1202 15:00:53.107494 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.158963 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29411461-gmgwz"] Dec 02 15:01:00 crc kubenswrapper[4902]: E1202 15:01:00.159819 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f92b5dcc-2f9b-440a-8c19-adea872f9df9" containerName="collect-profiles" Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.159835 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f92b5dcc-2f9b-440a-8c19-adea872f9df9" containerName="collect-profiles" Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.160069 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f92b5dcc-2f9b-440a-8c19-adea872f9df9" containerName="collect-profiles" Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.160767 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.170794 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411461-gmgwz"]
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.244433 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.244693 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.245101 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6ddg\" (UniqueName: \"kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.245199 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.350848 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6ddg\" (UniqueName: \"kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.350921 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.351024 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.353160 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.357467 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.364892 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.365695 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.377029 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6ddg\" (UniqueName: \"kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg\") pod \"keystone-cron-29411461-gmgwz\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") " pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.502464 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:00 crc kubenswrapper[4902]: I1202 15:01:00.994022 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411461-gmgwz"]
Dec 02 15:01:01 crc kubenswrapper[4902]: I1202 15:01:01.420348 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411461-gmgwz" event={"ID":"f2adf6c8-4207-43b6-9149-76b5e8e13c6a","Type":"ContainerStarted","Data":"b5bf38d91db61888a7d72df907387fb7965240ad9a6c25267046101b74575719"}
Dec 02 15:01:01 crc kubenswrapper[4902]: I1202 15:01:01.420674 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411461-gmgwz" event={"ID":"f2adf6c8-4207-43b6-9149-76b5e8e13c6a","Type":"ContainerStarted","Data":"a9212745115917ba874c0f10a5b484566bbef5714eddefdd432ca7b04445f9ce"}
Dec 02 15:01:02 crc kubenswrapper[4902]: I1202 15:01:02.456361 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29411461-gmgwz" podStartSLOduration=2.456327777 podStartE2EDuration="2.456327777s" podCreationTimestamp="2025-12-02 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 15:01:02.454431913 +0000 UTC m=+2693.645740622" watchObservedRunningTime="2025-12-02 15:01:02.456327777 +0000 UTC m=+2693.647636526"
Dec 02 15:01:04 crc kubenswrapper[4902]: I1202 15:01:04.450736 4902 generic.go:334] "Generic (PLEG): container finished" podID="f2adf6c8-4207-43b6-9149-76b5e8e13c6a" containerID="b5bf38d91db61888a7d72df907387fb7965240ad9a6c25267046101b74575719" exitCode=0
Dec 02 15:01:04 crc kubenswrapper[4902]: I1202 15:01:04.450769 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411461-gmgwz" event={"ID":"f2adf6c8-4207-43b6-9149-76b5e8e13c6a","Type":"ContainerDied","Data":"b5bf38d91db61888a7d72df907387fb7965240ad9a6c25267046101b74575719"}
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.868500 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.963636 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data\") pod \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") "
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.963745 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6ddg\" (UniqueName: \"kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg\") pod \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") "
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.963787 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys\") pod \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") "
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.963915 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle\") pod \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\" (UID: \"f2adf6c8-4207-43b6-9149-76b5e8e13c6a\") "
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.969623 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg" (OuterVolumeSpecName: "kube-api-access-p6ddg") pod "f2adf6c8-4207-43b6-9149-76b5e8e13c6a" (UID: "f2adf6c8-4207-43b6-9149-76b5e8e13c6a"). InnerVolumeSpecName "kube-api-access-p6ddg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:01:05 crc kubenswrapper[4902]: I1202 15:01:05.981729 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f2adf6c8-4207-43b6-9149-76b5e8e13c6a" (UID: "f2adf6c8-4207-43b6-9149-76b5e8e13c6a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.008455 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2adf6c8-4207-43b6-9149-76b5e8e13c6a" (UID: "f2adf6c8-4207-43b6-9149-76b5e8e13c6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.023652 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data" (OuterVolumeSpecName: "config-data") pod "f2adf6c8-4207-43b6-9149-76b5e8e13c6a" (UID: "f2adf6c8-4207-43b6-9149-76b5e8e13c6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.067095 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.067124 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6ddg\" (UniqueName: \"kubernetes.io/projected/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-kube-api-access-p6ddg\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.067136 4902 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.067144 4902 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2adf6c8-4207-43b6-9149-76b5e8e13c6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.470942 4902 generic.go:334] "Generic (PLEG): container finished" podID="61892f44-715d-453e-83fd-b62cd886d24e" containerID="e839b0f15a3a69e79cca13ce24d9f197bffb5fe33e1a5e8798c8bd73fc8f84ff" exitCode=0
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.471006 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" event={"ID":"61892f44-715d-453e-83fd-b62cd886d24e","Type":"ContainerDied","Data":"e839b0f15a3a69e79cca13ce24d9f197bffb5fe33e1a5e8798c8bd73fc8f84ff"}
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.472757 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411461-gmgwz" event={"ID":"f2adf6c8-4207-43b6-9149-76b5e8e13c6a","Type":"ContainerDied","Data":"a9212745115917ba874c0f10a5b484566bbef5714eddefdd432ca7b04445f9ce"}
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.472791 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9212745115917ba874c0f10a5b484566bbef5714eddefdd432ca7b04445f9ce"
Dec 02 15:01:06 crc kubenswrapper[4902]: I1202 15:01:06.472820 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411461-gmgwz"
Dec 02 15:01:07 crc kubenswrapper[4902]: I1202 15:01:07.898128 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016111 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016413 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016467 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4mzn\" (UniqueName: \"kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016485 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016541 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.016599 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.017075 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.017134 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.017167 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0\") pod \"61892f44-715d-453e-83fd-b62cd886d24e\" (UID: \"61892f44-715d-453e-83fd-b62cd886d24e\") "
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.033662 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn" (OuterVolumeSpecName: "kube-api-access-j4mzn") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "kube-api-access-j4mzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.042136 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.048722 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.056663 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.058755 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.061009 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory" (OuterVolumeSpecName: "inventory") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.071991 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.083856 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.084495 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61892f44-715d-453e-83fd-b62cd886d24e" (UID: "61892f44-715d-453e-83fd-b62cd886d24e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.108135 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 15:01:08 crc kubenswrapper[4902]: E1202 15:01:08.108395 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123102 4902 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123140 4902 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123154 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4mzn\" (UniqueName: \"kubernetes.io/projected/61892f44-715d-453e-83fd-b62cd886d24e-kube-api-access-j4mzn\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123164 4902 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/61892f44-715d-453e-83fd-b62cd886d24e-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123174 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123184 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123192 4902 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123201 4902 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.123211 4902 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/61892f44-715d-453e-83fd-b62cd886d24e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.510852 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms" event={"ID":"61892f44-715d-453e-83fd-b62cd886d24e","Type":"ContainerDied","Data":"12b07b51c8d4935743dd33f2ab1a6e06617b98a08532f1cfa045c6b248b79841"}
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.510901 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12b07b51c8d4935743dd33f2ab1a6e06617b98a08532f1cfa045c6b248b79841"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.511008 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-vmxms"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.605032 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"]
Dec 02 15:01:08 crc kubenswrapper[4902]: E1202 15:01:08.605518 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61892f44-715d-453e-83fd-b62cd886d24e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.605541 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="61892f44-715d-453e-83fd-b62cd886d24e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 02 15:01:08 crc kubenswrapper[4902]: E1202 15:01:08.605601 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2adf6c8-4207-43b6-9149-76b5e8e13c6a" containerName="keystone-cron"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.605611 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2adf6c8-4207-43b6-9149-76b5e8e13c6a" containerName="keystone-cron"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.605856 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="61892f44-715d-453e-83fd-b62cd886d24e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.605876 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2adf6c8-4207-43b6-9149-76b5e8e13c6a" containerName="keystone-cron"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.606654 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.612934 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.612932 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.613063 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.613091 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p29wb"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.615753 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.622664 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"]
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636070 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636132 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-447q4\" (UniqueName: \"kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636155 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636177 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636238 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636298 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.636339 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738244 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738408 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738479 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738611 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738689 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-447q4\" (UniqueName: \"kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738739 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.738781 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.744277 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.744312 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.744431 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.747983 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.751399 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.754581 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.761316 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-447q4\" (UniqueName: \"kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:08 crc kubenswrapper[4902]: I1202 15:01:08.935076 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:01:09 crc kubenswrapper[4902]: I1202 15:01:09.522774 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"]
Dec 02 15:01:09 crc kubenswrapper[4902]: I1202 15:01:09.526144 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 15:01:10 crc kubenswrapper[4902]: I1202 15:01:10.104715 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 15:01:10 crc kubenswrapper[4902]: I1202 15:01:10.532531 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w" event={"ID":"5bbd6a08-3913-4037-81e5-f7fd18479977","Type":"ContainerStarted","Data":"7c78f7b9a501746d50680daa4e06d253bd7be04a5cee71e42c015d111c0ff088"}
Dec 02 15:01:10 crc kubenswrapper[4902]: I1202 15:01:10.532939 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w" event={"ID":"5bbd6a08-3913-4037-81e5-f7fd18479977","Type":"ContainerStarted","Data":"02540323d2ed631085bd32df0de776f175637dccbea9b5714c88f0874f7f6862"}
Dec 02 15:01:10 crc kubenswrapper[4902]: I1202 15:01:10.571973 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w" podStartSLOduration=1.99529723 podStartE2EDuration="2.571955999s" podCreationTimestamp="2025-12-02 15:01:08 +0000 UTC" firstStartedPulling="2025-12-02 15:01:09.525805191 +0000 UTC m=+2700.717113900" lastFinishedPulling="2025-12-02 15:01:10.10246397 +0000 UTC m=+2701.293772669" observedRunningTime="2025-12-02 15:01:10.56635231 +0000 UTC m=+2701.757661019" watchObservedRunningTime="2025-12-02 15:01:10.571955999 +0000 UTC m=+2701.763264708"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.058543 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.061602 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.069039 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.096000 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54xzl\" (UniqueName: \"kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.096213 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.096246 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.197142 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.197195 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.197295 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54xzl\" (UniqueName: \"kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.198440 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.198520 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.219314 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54xzl\" (UniqueName: \"kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl\") pod \"redhat-operators-h48rb\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") " pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.388899 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:21 crc kubenswrapper[4902]: I1202 15:01:21.845205 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:21 crc kubenswrapper[4902]: W1202 15:01:21.848584 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e2a4fab_c869_4e79_978c_052e2c2d0d36.slice/crio-360ce68a0fc10b7dd5e0153708409c0aa38d722ba835004d587813871ce81687 WatchSource:0}: Error finding container 360ce68a0fc10b7dd5e0153708409c0aa38d722ba835004d587813871ce81687: Status 404 returned error can't find the container with id 360ce68a0fc10b7dd5e0153708409c0aa38d722ba835004d587813871ce81687
Dec 02 15:01:22 crc kubenswrapper[4902]: I1202 15:01:22.107656 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 15:01:22 crc kubenswrapper[4902]: E1202 15:01:22.108232 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:01:22 crc kubenswrapper[4902]: I1202 15:01:22.740764 4902 generic.go:334] "Generic (PLEG): container finished" podID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerID="06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7" exitCode=0
Dec 02 15:01:22 crc kubenswrapper[4902]: I1202 15:01:22.740832 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerDied","Data":"06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7"}
Dec 02 15:01:22 crc kubenswrapper[4902]: I1202 15:01:22.741069 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerStarted","Data":"360ce68a0fc10b7dd5e0153708409c0aa38d722ba835004d587813871ce81687"}
Dec 02 15:01:24 crc kubenswrapper[4902]: I1202 15:01:24.762460 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerStarted","Data":"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"}
Dec 02 15:01:25 crc kubenswrapper[4902]: I1202 15:01:25.774524 4902 generic.go:334] "Generic (PLEG): container finished" podID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerID="5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b" exitCode=0
Dec 02 15:01:25 crc kubenswrapper[4902]: I1202 15:01:25.774604 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerDied","Data":"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"}
Dec 02 15:01:27 crc kubenswrapper[4902]: I1202 15:01:27.799372 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerStarted","Data":"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"}
Dec 02 15:01:27 crc kubenswrapper[4902]: I1202 15:01:27.830325 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h48rb" podStartSLOduration=2.6592287409999997 podStartE2EDuration="6.830302659s" podCreationTimestamp="2025-12-02 15:01:21 +0000 UTC" firstStartedPulling="2025-12-02 15:01:22.742612207 +0000 UTC m=+2713.933920916" lastFinishedPulling="2025-12-02 15:01:26.913686125 +0000 UTC m=+2718.104994834" observedRunningTime="2025-12-02 15:01:27.824504984 +0000 UTC m=+2719.015813703" watchObservedRunningTime="2025-12-02 15:01:27.830302659 +0000 UTC m=+2719.021611378"
Dec 02 15:01:31 crc kubenswrapper[4902]: I1202 15:01:31.389522 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:31 crc kubenswrapper[4902]: I1202 15:01:31.391621 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:32 crc kubenswrapper[4902]: I1202 15:01:32.461203 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h48rb" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="registry-server" probeResult="failure" output=<
Dec 02 15:01:32 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s
Dec 02 15:01:32 crc kubenswrapper[4902]: >
Dec 02 15:01:36 crc kubenswrapper[4902]: I1202 15:01:36.108029 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90"
Dec 02 15:01:36 crc kubenswrapper[4902]: I1202 15:01:36.903934 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157"}
Dec 02 15:01:41 crc kubenswrapper[4902]: I1202 15:01:41.445388 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:41 crc kubenswrapper[4902]: I1202 15:01:41.500777 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:41 crc kubenswrapper[4902]: I1202 15:01:41.689885 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:42 crc kubenswrapper[4902]: I1202 15:01:42.986585 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h48rb" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="registry-server" containerID="cri-o://f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad" gracePeriod=2
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.424914 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.574428 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities\") pod \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") "
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.574588 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54xzl\" (UniqueName: \"kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl\") pod \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") "
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.574661 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content\") pod \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\" (UID: \"0e2a4fab-c869-4e79-978c-052e2c2d0d36\") "
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.575401 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities" (OuterVolumeSpecName: "utilities") pod "0e2a4fab-c869-4e79-978c-052e2c2d0d36" (UID: "0e2a4fab-c869-4e79-978c-052e2c2d0d36"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.582156 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl" (OuterVolumeSpecName: "kube-api-access-54xzl") pod "0e2a4fab-c869-4e79-978c-052e2c2d0d36" (UID: "0e2a4fab-c869-4e79-978c-052e2c2d0d36"). InnerVolumeSpecName "kube-api-access-54xzl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.676814 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.676845 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54xzl\" (UniqueName: \"kubernetes.io/projected/0e2a4fab-c869-4e79-978c-052e2c2d0d36-kube-api-access-54xzl\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.693680 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e2a4fab-c869-4e79-978c-052e2c2d0d36" (UID: "0e2a4fab-c869-4e79-978c-052e2c2d0d36"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:01:43 crc kubenswrapper[4902]: I1202 15:01:43.778906 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e2a4fab-c869-4e79-978c-052e2c2d0d36-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.000016 4902 generic.go:334] "Generic (PLEG): container finished" podID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerID="f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad" exitCode=0
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.000105 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h48rb"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.000131 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerDied","Data":"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"}
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.001069 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h48rb" event={"ID":"0e2a4fab-c869-4e79-978c-052e2c2d0d36","Type":"ContainerDied","Data":"360ce68a0fc10b7dd5e0153708409c0aa38d722ba835004d587813871ce81687"}
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.001120 4902 scope.go:117] "RemoveContainer" containerID="f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.038342 4902 scope.go:117] "RemoveContainer" containerID="5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.040800 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.051484 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h48rb"]
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.067597 4902 scope.go:117] "RemoveContainer" containerID="06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.122312 4902 scope.go:117] "RemoveContainer" containerID="f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"
Dec 02 15:01:44 crc kubenswrapper[4902]: E1202 15:01:44.122706 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad\": container with ID starting with f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad not found: ID does not exist" containerID="f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.122737 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad"} err="failed to get container status \"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad\": rpc error: code = NotFound desc = could not find container \"f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad\": container with ID starting with f781c97e37628d06ce5b76b4665a2111835d20e89aba393c05e59209e0cbebad not found: ID does not exist"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.122758 4902 scope.go:117] "RemoveContainer" containerID="5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"
Dec 02 15:01:44 crc kubenswrapper[4902]: E1202 15:01:44.123032 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b\": container with ID starting with 5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b not found: ID does not exist" containerID="5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.123076 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b"} err="failed to get container status \"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b\": rpc error: code = NotFound desc = could not find container \"5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b\": container with ID starting with 5764a272281607e6b6a607cfbee6b2152375b6d2da0876c313e374b72315ec1b not found: ID does not exist"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.123130 4902 scope.go:117] "RemoveContainer" containerID="06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7"
Dec 02 15:01:44 crc kubenswrapper[4902]: E1202 15:01:44.123497 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7\": container with ID starting with 06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7 not found: ID does not exist" containerID="06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7"
Dec 02 15:01:44 crc kubenswrapper[4902]: I1202 15:01:44.123533 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7"} err="failed to get container status \"06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7\": rpc error: code = NotFound desc = could not find container \"06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7\": container with ID starting with 06d88ad3e51a6fc1b7a9f7929f62c3f66fe581974e2595bbe7d5b8778fa230f7 not found: ID does not exist"
Dec 02 15:01:45 crc kubenswrapper[4902]: I1202 15:01:45.124737 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" path="/var/lib/kubelet/pods/0e2a4fab-c869-4e79-978c-052e2c2d0d36/volumes"
Dec 02 15:03:24 crc kubenswrapper[4902]: I1202 15:03:24.124327 4902 generic.go:334] "Generic (PLEG): container finished" podID="5bbd6a08-3913-4037-81e5-f7fd18479977" containerID="7c78f7b9a501746d50680daa4e06d253bd7be04a5cee71e42c015d111c0ff088" exitCode=0
Dec 02 15:03:24 crc kubenswrapper[4902]: I1202 15:03:24.124581 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w" event={"ID":"5bbd6a08-3913-4037-81e5-f7fd18479977","Type":"ContainerDied","Data":"7c78f7b9a501746d50680daa4e06d253bd7be04a5cee71e42c015d111c0ff088"}
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.567684 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.691907 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.691950 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-447q4\" (UniqueName: \"kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.691985 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.692055 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.692077 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.692094 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.692131 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2\") pod \"5bbd6a08-3913-4037-81e5-f7fd18479977\" (UID: \"5bbd6a08-3913-4037-81e5-f7fd18479977\") "
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.700616 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.701163 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4" (OuterVolumeSpecName: "kube-api-access-447q4") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "kube-api-access-447q4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.722083 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.723547 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.728848 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.730240 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory" (OuterVolumeSpecName: "inventory") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.733447 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5bbd6a08-3913-4037-81e5-f7fd18479977" (UID: "5bbd6a08-3913-4037-81e5-f7fd18479977"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794210 4902 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794248 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794260 4902 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794268 4902 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794278 4902 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794288 4902 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bbd6a08-3913-4037-81e5-f7fd18479977-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:25 crc kubenswrapper[4902]: I1202 15:03:25.794298 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-447q4\" (UniqueName: \"kubernetes.io/projected/5bbd6a08-3913-4037-81e5-f7fd18479977-kube-api-access-447q4\") on node \"crc\" DevicePath \"\""
Dec 02 15:03:26 crc kubenswrapper[4902]: I1202 15:03:26.156860 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w"
Dec 02 15:03:26 crc kubenswrapper[4902]: I1202 15:03:26.156507 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w" event={"ID":"5bbd6a08-3913-4037-81e5-f7fd18479977","Type":"ContainerDied","Data":"02540323d2ed631085bd32df0de776f175637dccbea9b5714c88f0874f7f6862"}
Dec 02 15:03:26 crc kubenswrapper[4902]: I1202 15:03:26.157813 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02540323d2ed631085bd32df0de776f175637dccbea9b5714c88f0874f7f6862"
Dec 02 15:04:03 crc kubenswrapper[4902]: I1202 15:04:03.751991 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 15:04:03 crc kubenswrapper[4902]: I1202 15:04:03.753042 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="prometheus" containerID="cri-o://5a65e34a3f138b9b9885263c5f6096d72dd6924e2ab5b59378dd1482d06a4aad" gracePeriod=600
Dec 02 15:04:03 crc kubenswrapper[4902]: I1202 15:04:03.753175 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="config-reloader" containerID="cri-o://777275b52e88fccb21ce0d89ccb2d4aca2b28a9de09e42ae43f817e4c0ea1d5b" gracePeriod=600
Dec 02 15:04:03 crc kubenswrapper[4902]: I1202 15:04:03.753164 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="thanos-sidecar" containerID="cri-o://dd3df0b598e7c1cd68317da2c80ba119932a2975a8d3359e8e0f97b19960a398" gracePeriod=600
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.626294 4902 generic.go:334] "Generic (PLEG): container finished" podID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerID="dd3df0b598e7c1cd68317da2c80ba119932a2975a8d3359e8e0f97b19960a398" exitCode=0
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.626336 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerDied","Data":"dd3df0b598e7c1cd68317da2c80ba119932a2975a8d3359e8e0f97b19960a398"}
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.628090 4902 generic.go:334] "Generic (PLEG): container finished" podID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerID="777275b52e88fccb21ce0d89ccb2d4aca2b28a9de09e42ae43f817e4c0ea1d5b" exitCode=0
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.628116 4902 generic.go:334] "Generic (PLEG): container finished" podID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerID="5a65e34a3f138b9b9885263c5f6096d72dd6924e2ab5b59378dd1482d06a4aad" exitCode=0
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.628130 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerDied","Data":"777275b52e88fccb21ce0d89ccb2d4aca2b28a9de09e42ae43f817e4c0ea1d5b"}
Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.628175 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0"
event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerDied","Data":"5a65e34a3f138b9b9885263c5f6096d72dd6924e2ab5b59378dd1482d06a4aad"} Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.731118 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.731164 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.827684 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903364 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w587l\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903660 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903799 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903867 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903952 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.903985 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.904006 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: 
\"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.904031 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.904064 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.904121 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.904143 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config\") pod \"f3ebb493-070e-44be-ae1e-b89464b9011f\" (UID: \"f3ebb493-070e-44be-ae1e-b89464b9011f\") " Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.906927 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.915098 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.915211 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l" (OuterVolumeSpecName: "kube-api-access-w587l") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "kube-api-access-w587l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.915276 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.917259 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.918159 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.918205 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out" (OuterVolumeSpecName: "config-out") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.918963 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config" (OuterVolumeSpecName: "config") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.920395 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:04:04 crc kubenswrapper[4902]: I1202 15:04:04.992780 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006121 4902 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006154 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w587l\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-kube-api-access-w587l\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006164 4902 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-config\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006187 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") on node \"crc\" " Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006199 4902 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f3ebb493-070e-44be-ae1e-b89464b9011f-config-out\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006208 4902 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006220 4902 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f3ebb493-070e-44be-ae1e-b89464b9011f-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006229 4902 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006239 4902 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f3ebb493-070e-44be-ae1e-b89464b9011f-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.006248 4902 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.033014 4902 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.033523 4902 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145") on node "crc" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.048496 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config" (OuterVolumeSpecName: "web-config") pod "f3ebb493-070e-44be-ae1e-b89464b9011f" (UID: "f3ebb493-070e-44be-ae1e-b89464b9011f"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.107576 4902 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f3ebb493-070e-44be-ae1e-b89464b9011f-web-config\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.107613 4902 reconciler_common.go:293] "Volume detached for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") on node \"crc\" DevicePath \"\"" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.637622 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f3ebb493-070e-44be-ae1e-b89464b9011f","Type":"ContainerDied","Data":"5bd9ae44a2412fad3e165b3c38eb9c8a31a54af8f395802a9f137f833e7d7788"} Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.637753 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.637994 4902 scope.go:117] "RemoveContainer" containerID="dd3df0b598e7c1cd68317da2c80ba119932a2975a8d3359e8e0f97b19960a398" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.661058 4902 scope.go:117] "RemoveContainer" containerID="777275b52e88fccb21ce0d89ccb2d4aca2b28a9de09e42ae43f817e4c0ea1d5b" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.661713 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.673631 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.690087 4902 scope.go:117] "RemoveContainer" containerID="5a65e34a3f138b9b9885263c5f6096d72dd6924e2ab5b59378dd1482d06a4aad" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.705747 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706259 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="extract-content" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706280 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="extract-content" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706303 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="init-config-reloader" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706312 4902 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="init-config-reloader" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706334 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="thanos-sidecar" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706343 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="thanos-sidecar" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706365 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="registry-server" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706373 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="registry-server" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706385 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="prometheus" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706393 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="prometheus" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706411 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bbd6a08-3913-4037-81e5-f7fd18479977" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706420 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bbd6a08-3913-4037-81e5-f7fd18479977" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706439 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="config-reloader" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706447 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="config-reloader" Dec 02 15:04:05 crc kubenswrapper[4902]: E1202 15:04:05.706467 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="extract-utilities" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706476 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="extract-utilities" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706776 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="thanos-sidecar" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706797 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e2a4fab-c869-4e79-978c-052e2c2d0d36" containerName="registry-server" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706814 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" containerName="config-reloader" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706827 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bbd6a08-3913-4037-81e5-f7fd18479977" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.706851 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" 
containerName="prometheus" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.709027 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.712421 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-k2vng" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.712580 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.712611 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.712654 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.714273 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.721985 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.735361 4902 scope.go:117] "RemoveContainer" containerID="9f98c6399543b6aeb0902271cbea14acf7f3eec0bae6d280d6de5237b1a46914" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.793125 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.842948 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843023 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843082 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843133 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: 
I1202 15:04:05.843161 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddghd\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-kube-api-access-ddghd\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843186 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843240 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843324 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843349 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843381 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.843432 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.946728 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.946807 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.946859 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddghd\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-kube-api-access-ddghd\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.946887 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.946935 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.947000 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.947025 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.947060 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.947102 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.947165 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 
15:04:05.947205 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.948139 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.958237 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.958262 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.962705 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.963096 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.968766 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.969062 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.969183 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " 
pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.969263 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.969321 4902 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.969352 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/665a64dfccf06cca52387316c6cee605d1c10e0fb3133ba2864d18ab912b518d/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:05 crc kubenswrapper[4902]: I1202 15:04:05.984307 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddghd\" (UniqueName: \"kubernetes.io/projected/e67ef796-c39e-45e9-bbcf-bfb0fd77dff4-kube-api-access-ddghd\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:06 crc kubenswrapper[4902]: I1202 15:04:06.026529 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aa3e33fd-8a9f-4711-a631-d48a8642e145\") pod \"prometheus-metric-storage-0\" (UID: \"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4\") " pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:06 crc kubenswrapper[4902]: I1202 15:04:06.033160 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:06 crc kubenswrapper[4902]: I1202 15:04:06.485219 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 15:04:06 crc kubenswrapper[4902]: W1202 15:04:06.496736 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode67ef796_c39e_45e9_bbcf_bfb0fd77dff4.slice/crio-03623a324e5f0a001070a38db20d8bfc8dfaa8dd22b3981ff7ec5c3b0f0dfa57 WatchSource:0}: Error finding container 03623a324e5f0a001070a38db20d8bfc8dfaa8dd22b3981ff7ec5c3b0f0dfa57: Status 404 returned error can't find the container with id 03623a324e5f0a001070a38db20d8bfc8dfaa8dd22b3981ff7ec5c3b0f0dfa57 Dec 02 15:04:06 crc kubenswrapper[4902]: I1202 15:04:06.646873 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerStarted","Data":"03623a324e5f0a001070a38db20d8bfc8dfaa8dd22b3981ff7ec5c3b0f0dfa57"} Dec 02 15:04:07 crc kubenswrapper[4902]: I1202 15:04:07.121961 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3ebb493-070e-44be-ae1e-b89464b9011f" path="/var/lib/kubelet/pods/f3ebb493-070e-44be-ae1e-b89464b9011f/volumes" Dec 02 15:04:10 crc kubenswrapper[4902]: I1202 15:04:10.716371 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerStarted","Data":"9b6a217f0feda9ac93a2ffd7fdfcff835878738a99cf8b86dd56bedd1a35a6ec"} Dec 02 15:04:16 crc kubenswrapper[4902]: I1202 15:04:16.785626 4902 generic.go:334] "Generic (PLEG): container finished" podID="e67ef796-c39e-45e9-bbcf-bfb0fd77dff4" containerID="9b6a217f0feda9ac93a2ffd7fdfcff835878738a99cf8b86dd56bedd1a35a6ec" exitCode=0 Dec 02 15:04:16 crc kubenswrapper[4902]: I1202 15:04:16.785691 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerDied","Data":"9b6a217f0feda9ac93a2ffd7fdfcff835878738a99cf8b86dd56bedd1a35a6ec"} Dec 02 15:04:17 crc kubenswrapper[4902]: I1202 15:04:17.799865 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerStarted","Data":"1428b6d9e84a8454f2137b2b44fe2cf4034d12e5974117c3453a1249444f3ef1"} Dec 02 15:04:20 crc kubenswrapper[4902]: I1202 15:04:20.842834 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerStarted","Data":"2c3baa99bd28557532667b1f54e7a5656f26767f4a3ba090f1b0521a7307641c"} Dec 02 15:04:20 crc kubenswrapper[4902]: I1202 15:04:20.843461 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e67ef796-c39e-45e9-bbcf-bfb0fd77dff4","Type":"ContainerStarted","Data":"3c3d80c93a3312a5d995b70032ec5078008d691474e6e5d7f54f8d8d8d8f956e"} Dec 02 15:04:20 crc kubenswrapper[4902]: I1202 15:04:20.874537 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=15.874516346 podStartE2EDuration="15.874516346s" podCreationTimestamp="2025-12-02 15:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-02 15:04:20.869493034 +0000 UTC m=+2892.060801763" watchObservedRunningTime="2025-12-02 15:04:20.874516346 +0000 UTC m=+2892.065825065" Dec 02 15:04:21 crc kubenswrapper[4902]: I1202 15:04:21.033514 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:21 crc kubenswrapper[4902]: I1202 15:04:21.033575 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:21 crc kubenswrapper[4902]: I1202 15:04:21.039882 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:21 crc kubenswrapper[4902]: I1202 15:04:21.857708 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 02 15:04:34 crc kubenswrapper[4902]: I1202 15:04:34.731961 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:04:34 crc kubenswrapper[4902]: I1202 15:04:34.732591 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.554395 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.557012 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.561058 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vc7jf" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.561288 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.563185 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.564319 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.568183 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662267 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8qd2\" (UniqueName: \"kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662373 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662410 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662443 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662481 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662505 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662540 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662611 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.662662 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764393 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764440 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764467 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764499 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764530 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764581 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764643 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret\") pod 
\"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764697 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.764754 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8qd2\" (UniqueName: \"kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.765009 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.765353 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.765822 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.765926 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.767123 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.776402 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.776440 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc 
kubenswrapper[4902]: I1202 15:04:57.776807 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.790903 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8qd2\" (UniqueName: \"kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.796283 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " pod="openstack/tempest-tests-tempest" Dec 02 15:04:57 crc kubenswrapper[4902]: I1202 15:04:57.885932 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 15:04:58 crc kubenswrapper[4902]: I1202 15:04:58.366594 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 15:04:58 crc kubenswrapper[4902]: I1202 15:04:58.653885 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3469f18f-c530-4a00-91ba-95720c45b4c2","Type":"ContainerStarted","Data":"0d1f4521f335f96cfd0179d56ebfc38a96cf149275497f27093d291b56b82ffc"} Dec 02 15:05:04 crc kubenswrapper[4902]: I1202 15:05:04.739755 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:05:04 crc kubenswrapper[4902]: I1202 15:05:04.740276 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:05:04 crc kubenswrapper[4902]: I1202 15:05:04.740320 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:05:04 crc kubenswrapper[4902]: I1202 15:05:04.740978 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:05:04 crc kubenswrapper[4902]: I1202 15:05:04.741026 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157" gracePeriod=600 Dec 02 15:05:05 crc kubenswrapper[4902]: I1202 15:05:05.806175 4902 
generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157" exitCode=0 Dec 02 15:05:05 crc kubenswrapper[4902]: I1202 15:05:05.806260 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157"} Dec 02 15:05:05 crc kubenswrapper[4902]: I1202 15:05:05.806582 4902 scope.go:117] "RemoveContainer" containerID="301592f763a0795feac8fede8dcbc9724498008c39f1bb7949bc966965336b90" Dec 02 15:05:09 crc kubenswrapper[4902]: I1202 15:05:09.983447 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 15:05:10 crc kubenswrapper[4902]: I1202 15:05:10.863100 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3469f18f-c530-4a00-91ba-95720c45b4c2","Type":"ContainerStarted","Data":"9db67023a14534dd3abe8c6358e68f675c257f28d1b164fa902d65ab41d5e998"} Dec 02 15:05:10 crc kubenswrapper[4902]: I1202 15:05:10.866048 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b"} Dec 02 15:05:10 crc kubenswrapper[4902]: I1202 15:05:10.885732 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.274106311 podStartE2EDuration="14.885708812s" podCreationTimestamp="2025-12-02 15:04:56 +0000 UTC" firstStartedPulling="2025-12-02 15:04:58.36925086 +0000 UTC m=+2929.560559579" lastFinishedPulling="2025-12-02 15:05:09.980853371 +0000 UTC m=+2941.172162080" observedRunningTime="2025-12-02 15:05:10.883929041 +0000 UTC m=+2942.075237750" watchObservedRunningTime="2025-12-02 15:05:10.885708812 +0000 UTC m=+2942.077017521" Dec 02 15:07:08 crc kubenswrapper[4902]: I1202 15:07:08.984940 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:08 crc kubenswrapper[4902]: I1202 15:07:08.995550 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:08.999418 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.077529 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.077817 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.077878 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzfd4\" (UniqueName: \"kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.179949 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.180000 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.180057 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzfd4\" (UniqueName: \"kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.180467 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.180627 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.205265 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tzfd4\" (UniqueName: \"kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4\") pod \"certified-operators-hn4zm\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.334135 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:09 crc kubenswrapper[4902]: I1202 15:07:09.822209 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:10 crc kubenswrapper[4902]: I1202 15:07:10.223229 4902 generic.go:334] "Generic (PLEG): container finished" podID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerID="407ddbf49d7cb0ba997dcbf13bb4f5b67f39ee0a20662fec471e95086750a81c" exitCode=0 Dec 02 15:07:10 crc kubenswrapper[4902]: I1202 15:07:10.223312 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerDied","Data":"407ddbf49d7cb0ba997dcbf13bb4f5b67f39ee0a20662fec471e95086750a81c"} Dec 02 15:07:10 crc kubenswrapper[4902]: I1202 15:07:10.223574 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerStarted","Data":"17f378736371291cedc81b7e4996797bfb9f3fd7ee0edddd963cdbefd31dbd3d"} Dec 02 15:07:10 crc kubenswrapper[4902]: I1202 15:07:10.225185 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:07:12 crc kubenswrapper[4902]: I1202 15:07:12.243527 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerStarted","Data":"a9dfd1ee4d6bb4e6c9da46a978c01c0bb6b0aeb49d65924ebfa93ca63b9e5892"} Dec 02 15:07:14 crc kubenswrapper[4902]: I1202 15:07:14.267296 4902 generic.go:334] "Generic (PLEG): container finished" podID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerID="a9dfd1ee4d6bb4e6c9da46a978c01c0bb6b0aeb49d65924ebfa93ca63b9e5892" exitCode=0 Dec 02 15:07:14 crc kubenswrapper[4902]: I1202 15:07:14.267869 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerDied","Data":"a9dfd1ee4d6bb4e6c9da46a978c01c0bb6b0aeb49d65924ebfa93ca63b9e5892"} Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.369204 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.372718 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.386428 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.401091 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.401533 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.401900 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp6fr\" (UniqueName: \"kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.503082 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.503670 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.503893 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.504214 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.504390 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp6fr\" (UniqueName: \"kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.531865 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tp6fr\" (UniqueName: \"kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr\") pod \"community-operators-6rft9\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:15 crc kubenswrapper[4902]: I1202 15:07:15.702938 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:16 crc kubenswrapper[4902]: I1202 15:07:16.189756 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:16 crc kubenswrapper[4902]: I1202 15:07:16.290247 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerStarted","Data":"74bcd55f39ac51e5f60fed1b4c969df2d3808627cdb5bda9d7cccc13e1fe3081"} Dec 02 15:07:16 crc kubenswrapper[4902]: I1202 15:07:16.293224 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerStarted","Data":"41387876819be2399331430a5e7fbb2e55c65aed2e38a43508ca970dc2b9e5d7"} Dec 02 15:07:16 crc kubenswrapper[4902]: I1202 15:07:16.323246 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hn4zm" podStartSLOduration=3.123286293 podStartE2EDuration="8.323224992s" podCreationTimestamp="2025-12-02 15:07:08 +0000 UTC" firstStartedPulling="2025-12-02 15:07:10.224940878 +0000 UTC m=+3061.416249587" lastFinishedPulling="2025-12-02 15:07:15.424879567 +0000 UTC m=+3066.616188286" observedRunningTime="2025-12-02 15:07:16.313434695 +0000 UTC m=+3067.504743414" watchObservedRunningTime="2025-12-02 15:07:16.323224992 +0000 UTC m=+3067.514533701" Dec 02 15:07:17 crc kubenswrapper[4902]: I1202 15:07:17.303909 4902 generic.go:334] "Generic (PLEG): container finished" podID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerID="76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0" exitCode=0 Dec 02 15:07:17 crc kubenswrapper[4902]: I1202 15:07:17.303975 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerDied","Data":"76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0"} Dec 02 15:07:19 crc kubenswrapper[4902]: I1202 15:07:19.329546 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerStarted","Data":"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce"} Dec 02 15:07:19 crc kubenswrapper[4902]: I1202 15:07:19.334255 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:19 crc kubenswrapper[4902]: I1202 15:07:19.334368 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:20 crc kubenswrapper[4902]: I1202 15:07:20.344419 4902 generic.go:334] "Generic (PLEG): container finished" podID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerID="f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce" exitCode=0 Dec 02 15:07:20 crc kubenswrapper[4902]: I1202 15:07:20.346252 
4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerDied","Data":"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce"} Dec 02 15:07:20 crc kubenswrapper[4902]: I1202 15:07:20.391083 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-hn4zm" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="registry-server" probeResult="failure" output=< Dec 02 15:07:20 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 15:07:20 crc kubenswrapper[4902]: > Dec 02 15:07:21 crc kubenswrapper[4902]: I1202 15:07:21.360477 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerStarted","Data":"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145"} Dec 02 15:07:21 crc kubenswrapper[4902]: I1202 15:07:21.390309 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6rft9" podStartSLOduration=2.790718707 podStartE2EDuration="6.390288904s" podCreationTimestamp="2025-12-02 15:07:15 +0000 UTC" firstStartedPulling="2025-12-02 15:07:17.306037083 +0000 UTC m=+3068.497345802" lastFinishedPulling="2025-12-02 15:07:20.90560728 +0000 UTC m=+3072.096915999" observedRunningTime="2025-12-02 15:07:21.387858425 +0000 UTC m=+3072.579167144" watchObservedRunningTime="2025-12-02 15:07:21.390288904 +0000 UTC m=+3072.581597623" Dec 02 15:07:25 crc kubenswrapper[4902]: I1202 15:07:25.704935 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:25 crc kubenswrapper[4902]: I1202 15:07:25.705552 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:25 crc kubenswrapper[4902]: I1202 15:07:25.821448 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:26 crc kubenswrapper[4902]: I1202 15:07:26.503695 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:26 crc kubenswrapper[4902]: I1202 15:07:26.564288 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:28 crc kubenswrapper[4902]: I1202 15:07:28.435791 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6rft9" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="registry-server" containerID="cri-o://f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145" gracePeriod=2 Dec 02 15:07:28 crc kubenswrapper[4902]: I1202 15:07:28.958527 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.099421 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp6fr\" (UniqueName: \"kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr\") pod \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.099926 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities\") pod \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.099969 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content\") pod \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\" (UID: \"007ee9b6-2c6f-41f1-9965-d5251e32f88c\") " Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.100629 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities" (OuterVolumeSpecName: "utilities") pod "007ee9b6-2c6f-41f1-9965-d5251e32f88c" (UID: "007ee9b6-2c6f-41f1-9965-d5251e32f88c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.104862 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr" (OuterVolumeSpecName: "kube-api-access-tp6fr") pod "007ee9b6-2c6f-41f1-9965-d5251e32f88c" (UID: "007ee9b6-2c6f-41f1-9965-d5251e32f88c"). InnerVolumeSpecName "kube-api-access-tp6fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.177215 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "007ee9b6-2c6f-41f1-9965-d5251e32f88c" (UID: "007ee9b6-2c6f-41f1-9965-d5251e32f88c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.202306 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.202335 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/007ee9b6-2c6f-41f1-9965-d5251e32f88c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.202347 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp6fr\" (UniqueName: \"kubernetes.io/projected/007ee9b6-2c6f-41f1-9965-d5251e32f88c-kube-api-access-tp6fr\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.401183 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.453855 4902 generic.go:334] "Generic (PLEG): container finished" podID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerID="f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145" exitCode=0 Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.453909 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerDied","Data":"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145"} Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.453945 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6rft9" event={"ID":"007ee9b6-2c6f-41f1-9965-d5251e32f88c","Type":"ContainerDied","Data":"74bcd55f39ac51e5f60fed1b4c969df2d3808627cdb5bda9d7cccc13e1fe3081"} Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.453964 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6rft9" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.453975 4902 scope.go:117] "RemoveContainer" containerID="f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.455647 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.487139 4902 scope.go:117] "RemoveContainer" containerID="f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.518410 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.528822 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6rft9"] Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.541655 4902 scope.go:117] "RemoveContainer" containerID="76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.563869 4902 scope.go:117] "RemoveContainer" containerID="f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145" Dec 02 15:07:29 crc kubenswrapper[4902]: E1202 15:07:29.564293 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145\": container with ID starting with f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145 not found: ID does not exist" containerID="f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.564344 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145"} err="failed to get container status \"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145\": rpc error: code = NotFound desc = could not find container \"f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145\": container with ID starting with f7bc9f31859e17c0292ea27df99222187360dc3ec70488d77d16e8929bccf145 not found: ID does not exist" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.564371 4902 scope.go:117] "RemoveContainer" containerID="f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce" Dec 02 15:07:29 crc kubenswrapper[4902]: E1202 15:07:29.564715 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce\": container with ID starting with f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce not found: ID does not exist" containerID="f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.564753 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce"} err="failed to get container status \"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce\": rpc error: code = NotFound desc = could not find container \"f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce\": container with ID starting with 
f7f284a8ad2b213d77cfb37516c59b8223632970943efacc71967f6db5ca05ce not found: ID does not exist" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.564777 4902 scope.go:117] "RemoveContainer" containerID="76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0" Dec 02 15:07:29 crc kubenswrapper[4902]: E1202 15:07:29.565081 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0\": container with ID starting with 76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0 not found: ID does not exist" containerID="76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0" Dec 02 15:07:29 crc kubenswrapper[4902]: I1202 15:07:29.565139 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0"} err="failed to get container status \"76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0\": rpc error: code = NotFound desc = could not find container \"76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0\": container with ID starting with 76fddfc8c046d32ed270c0706ef2a5cdcdb006f943e826fa37f21d3838951ef0 not found: ID does not exist" Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.119855 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" path="/var/lib/kubelet/pods/007ee9b6-2c6f-41f1-9965-d5251e32f88c/volumes" Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.269308 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.269537 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hn4zm" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="registry-server" containerID="cri-o://41387876819be2399331430a5e7fbb2e55c65aed2e38a43508ca970dc2b9e5d7" gracePeriod=2 Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.493226 4902 generic.go:334] "Generic (PLEG): container finished" podID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerID="41387876819be2399331430a5e7fbb2e55c65aed2e38a43508ca970dc2b9e5d7" exitCode=0 Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.493335 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerDied","Data":"41387876819be2399331430a5e7fbb2e55c65aed2e38a43508ca970dc2b9e5d7"} Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.849836 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.958029 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content\") pod \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.958126 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities\") pod \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.958237 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzfd4\" (UniqueName: \"kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4\") pod \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\" (UID: \"a4dc6914-1de2-4e84-80ca-c09b397f8b4b\") " Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.959035 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities" (OuterVolumeSpecName: "utilities") pod "a4dc6914-1de2-4e84-80ca-c09b397f8b4b" (UID: "a4dc6914-1de2-4e84-80ca-c09b397f8b4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:07:31 crc kubenswrapper[4902]: I1202 15:07:31.964351 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4" (OuterVolumeSpecName: "kube-api-access-tzfd4") pod "a4dc6914-1de2-4e84-80ca-c09b397f8b4b" (UID: "a4dc6914-1de2-4e84-80ca-c09b397f8b4b"). InnerVolumeSpecName "kube-api-access-tzfd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.001328 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4dc6914-1de2-4e84-80ca-c09b397f8b4b" (UID: "a4dc6914-1de2-4e84-80ca-c09b397f8b4b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.060443 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.060488 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.060503 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzfd4\" (UniqueName: \"kubernetes.io/projected/a4dc6914-1de2-4e84-80ca-c09b397f8b4b-kube-api-access-tzfd4\") on node \"crc\" DevicePath \"\"" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.509293 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4zm" event={"ID":"a4dc6914-1de2-4e84-80ca-c09b397f8b4b","Type":"ContainerDied","Data":"17f378736371291cedc81b7e4996797bfb9f3fd7ee0edddd963cdbefd31dbd3d"} Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.509398 4902 scope.go:117] "RemoveContainer" containerID="41387876819be2399331430a5e7fbb2e55c65aed2e38a43508ca970dc2b9e5d7" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.509457 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hn4zm" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.537473 4902 scope.go:117] "RemoveContainer" containerID="a9dfd1ee4d6bb4e6c9da46a978c01c0bb6b0aeb49d65924ebfa93ca63b9e5892" Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.556728 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.565990 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hn4zm"] Dec 02 15:07:32 crc kubenswrapper[4902]: I1202 15:07:32.574993 4902 scope.go:117] "RemoveContainer" containerID="407ddbf49d7cb0ba997dcbf13bb4f5b67f39ee0a20662fec471e95086750a81c" Dec 02 15:07:33 crc kubenswrapper[4902]: I1202 15:07:33.136027 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" path="/var/lib/kubelet/pods/a4dc6914-1de2-4e84-80ca-c09b397f8b4b/volumes" Dec 02 15:07:34 crc kubenswrapper[4902]: I1202 15:07:34.731714 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:07:34 crc kubenswrapper[4902]: I1202 15:07:34.732460 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:08:04 crc kubenswrapper[4902]: I1202 15:08:04.731649 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:08:04 crc kubenswrapper[4902]: I1202 15:08:04.732081 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:08:34 crc kubenswrapper[4902]: I1202 15:08:34.731183 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:08:34 crc kubenswrapper[4902]: I1202 15:08:34.731759 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:08:34 crc kubenswrapper[4902]: I1202 15:08:34.731808 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:08:34 crc kubenswrapper[4902]: I1202 15:08:34.732483 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:08:34 crc kubenswrapper[4902]: I1202 15:08:34.732582 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" gracePeriod=600 Dec 02 15:08:34 crc kubenswrapper[4902]: E1202 15:08:34.854237 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:08:35 crc kubenswrapper[4902]: I1202 15:08:35.188135 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" exitCode=0 Dec 02 15:08:35 crc kubenswrapper[4902]: I1202 15:08:35.188216 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b"} Dec 02 15:08:35 crc kubenswrapper[4902]: I1202 15:08:35.188308 4902 scope.go:117] "RemoveContainer" containerID="9e888b9484cfa25ccd5bb37bb6aeb3dccfcd6727570895d583db033537ec4157" Dec 02 15:08:35 crc kubenswrapper[4902]: I1202 
15:08:35.189927 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:08:35 crc kubenswrapper[4902]: E1202 15:08:35.190630 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:08:50 crc kubenswrapper[4902]: I1202 15:08:50.107485 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:08:50 crc kubenswrapper[4902]: E1202 15:08:50.108491 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:09:05 crc kubenswrapper[4902]: I1202 15:09:05.107290 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:09:05 crc kubenswrapper[4902]: E1202 15:09:05.108742 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:09:19 crc kubenswrapper[4902]: I1202 15:09:19.113627 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:09:19 crc kubenswrapper[4902]: E1202 15:09:19.114356 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:09:30 crc kubenswrapper[4902]: I1202 15:09:30.107022 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:09:30 crc kubenswrapper[4902]: E1202 15:09:30.108120 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:09:41 crc kubenswrapper[4902]: I1202 15:09:41.107766 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:09:41 crc kubenswrapper[4902]: E1202 15:09:41.109029 
4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:09:55 crc kubenswrapper[4902]: I1202 15:09:55.106872 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:09:55 crc kubenswrapper[4902]: E1202 15:09:55.108127 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:10:09 crc kubenswrapper[4902]: I1202 15:10:09.113920 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:10:09 crc kubenswrapper[4902]: E1202 15:10:09.114943 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:10:21 crc kubenswrapper[4902]: I1202 15:10:21.107550 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:10:21 crc kubenswrapper[4902]: E1202 15:10:21.108822 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:10:35 crc kubenswrapper[4902]: I1202 15:10:35.107219 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:10:35 crc kubenswrapper[4902]: E1202 15:10:35.108237 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:10:47 crc kubenswrapper[4902]: I1202 15:10:47.108055 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:10:47 crc kubenswrapper[4902]: E1202 15:10:47.109052 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:11:02 crc kubenswrapper[4902]: I1202 15:11:02.107062 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:11:02 crc kubenswrapper[4902]: E1202 15:11:02.108277 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:11:17 crc kubenswrapper[4902]: I1202 15:11:17.109131 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:11:17 crc kubenswrapper[4902]: E1202 15:11:17.110355 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:11:28 crc kubenswrapper[4902]: I1202 15:11:28.106235 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:11:28 crc kubenswrapper[4902]: E1202 15:11:28.107011 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:11:39 crc kubenswrapper[4902]: I1202 15:11:39.117288 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:11:39 crc kubenswrapper[4902]: E1202 15:11:39.118059 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:11:53 crc kubenswrapper[4902]: I1202 15:11:53.107302 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:11:53 crc kubenswrapper[4902]: E1202 15:11:53.108208 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.434171 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.435886 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="extract-content" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.435925 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="extract-content" Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.435952 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="extract-utilities" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.435970 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="extract-utilities" Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.436002 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="extract-content" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.436023 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="extract-content" Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.436060 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.436078 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.436111 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="extract-utilities" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.436130 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="extract-utilities" Dec 02 15:12:02 crc kubenswrapper[4902]: E1202 15:12:02.436170 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.436191 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.436743 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="007ee9b6-2c6f-41f1-9965-d5251e32f88c" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.438810 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4dc6914-1de2-4e84-80ca-c09b397f8b4b" containerName="registry-server" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.443262 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.459040 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.500057 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.500126 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-728r2\" (UniqueName: \"kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.500948 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.603283 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.603367 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.603415 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-728r2\" (UniqueName: \"kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.603929 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.604034 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.625136 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-728r2\" (UniqueName: \"kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2\") pod \"redhat-marketplace-f4mbp\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:02 crc kubenswrapper[4902]: I1202 15:12:02.783441 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:03 crc kubenswrapper[4902]: I1202 15:12:03.299364 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:03 crc kubenswrapper[4902]: I1202 15:12:03.542692 4902 generic.go:334] "Generic (PLEG): container finished" podID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerID="c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265" exitCode=0 Dec 02 15:12:03 crc kubenswrapper[4902]: I1202 15:12:03.542814 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerDied","Data":"c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265"} Dec 02 15:12:03 crc kubenswrapper[4902]: I1202 15:12:03.543007 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerStarted","Data":"4b28287dc654ac8aaef28bb01ce67af4f3c16b75290fe3f4f92b02c749cb5f1f"} Dec 02 15:12:04 crc kubenswrapper[4902]: I1202 15:12:04.553255 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerStarted","Data":"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210"} Dec 02 15:12:05 crc kubenswrapper[4902]: I1202 15:12:05.567699 4902 generic.go:334] "Generic (PLEG): container finished" podID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerID="d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210" exitCode=0 Dec 02 15:12:05 crc kubenswrapper[4902]: I1202 15:12:05.567899 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerDied","Data":"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210"} Dec 02 15:12:06 crc kubenswrapper[4902]: I1202 15:12:06.578981 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerStarted","Data":"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e"} Dec 02 15:12:06 crc kubenswrapper[4902]: I1202 15:12:06.613676 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f4mbp" podStartSLOduration=1.9176573289999999 podStartE2EDuration="4.613650001s" podCreationTimestamp="2025-12-02 15:12:02 +0000 UTC" firstStartedPulling="2025-12-02 15:12:03.544292051 +0000 UTC m=+3354.735600760" lastFinishedPulling="2025-12-02 15:12:06.240284733 +0000 UTC m=+3357.431593432" observedRunningTime="2025-12-02 15:12:06.604819811 +0000 UTC m=+3357.796128520" watchObservedRunningTime="2025-12-02 15:12:06.613650001 +0000 UTC m=+3357.804958720" Dec 02 15:12:08 crc kubenswrapper[4902]: I1202 15:12:08.106622 4902 scope.go:117] "RemoveContainer" 
containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:12:08 crc kubenswrapper[4902]: E1202 15:12:08.107407 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:12:12 crc kubenswrapper[4902]: I1202 15:12:12.785155 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:12 crc kubenswrapper[4902]: I1202 15:12:12.786008 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:12 crc kubenswrapper[4902]: I1202 15:12:12.849121 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:13 crc kubenswrapper[4902]: I1202 15:12:13.707540 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:13 crc kubenswrapper[4902]: I1202 15:12:13.781073 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:15 crc kubenswrapper[4902]: I1202 15:12:15.669027 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f4mbp" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="registry-server" containerID="cri-o://d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e" gracePeriod=2 Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.236787 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.384711 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content\") pod \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.384766 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-728r2\" (UniqueName: \"kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2\") pod \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.384788 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities\") pod \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\" (UID: \"ea02500a-8808-4c52-9d16-fa7a0a1acc8d\") " Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.385890 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities" (OuterVolumeSpecName: "utilities") pod "ea02500a-8808-4c52-9d16-fa7a0a1acc8d" (UID: "ea02500a-8808-4c52-9d16-fa7a0a1acc8d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.399479 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2" (OuterVolumeSpecName: "kube-api-access-728r2") pod "ea02500a-8808-4c52-9d16-fa7a0a1acc8d" (UID: "ea02500a-8808-4c52-9d16-fa7a0a1acc8d"). InnerVolumeSpecName "kube-api-access-728r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.400697 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea02500a-8808-4c52-9d16-fa7a0a1acc8d" (UID: "ea02500a-8808-4c52-9d16-fa7a0a1acc8d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.488137 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.488173 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-728r2\" (UniqueName: \"kubernetes.io/projected/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-kube-api-access-728r2\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.488186 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea02500a-8808-4c52-9d16-fa7a0a1acc8d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.680001 4902 generic.go:334] "Generic (PLEG): container finished" podID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerID="d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e" exitCode=0 Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.680087 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f4mbp" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.680131 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerDied","Data":"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e"} Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.685346 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f4mbp" event={"ID":"ea02500a-8808-4c52-9d16-fa7a0a1acc8d","Type":"ContainerDied","Data":"4b28287dc654ac8aaef28bb01ce67af4f3c16b75290fe3f4f92b02c749cb5f1f"} Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.685391 4902 scope.go:117] "RemoveContainer" containerID="d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.724538 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.730288 4902 scope.go:117] "RemoveContainer" containerID="d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.740716 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f4mbp"] Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.753619 4902 scope.go:117] "RemoveContainer" containerID="c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.818527 4902 scope.go:117] "RemoveContainer" containerID="d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e" Dec 02 15:12:16 crc kubenswrapper[4902]: E1202 15:12:16.819126 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e\": container with ID starting with d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e not found: ID does not exist" containerID="d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.819157 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e"} err="failed to get container status \"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e\": rpc error: code = NotFound desc = could not find container \"d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e\": container with ID starting with d9d3ec9049842fb6262e03cb9c81a288a44c717aaee32c6d6d2eb0ce5df9b35e not found: ID does not exist" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.819181 4902 scope.go:117] "RemoveContainer" containerID="d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210" Dec 02 15:12:16 crc kubenswrapper[4902]: E1202 15:12:16.819591 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210\": container with ID starting with d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210 not found: ID does not exist" containerID="d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.819615 4902 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210"} err="failed to get container status \"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210\": rpc error: code = NotFound desc = could not find container \"d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210\": container with ID starting with d04e0e4acdd0ebbb478d0266dbf4868e55a503cadf0d17ac005afa5acdf58210 not found: ID does not exist" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.819628 4902 scope.go:117] "RemoveContainer" containerID="c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265" Dec 02 15:12:16 crc kubenswrapper[4902]: E1202 15:12:16.823116 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265\": container with ID starting with c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265 not found: ID does not exist" containerID="c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265" Dec 02 15:12:16 crc kubenswrapper[4902]: I1202 15:12:16.823146 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265"} err="failed to get container status \"c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265\": rpc error: code = NotFound desc = could not find container \"c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265\": container with ID starting with c8e9ab17367bd25203f4de1be85b47b4b3b2a1e5f71f27faea06242158511265 not found: ID does not exist" Dec 02 15:12:17 crc kubenswrapper[4902]: I1202 15:12:17.125406 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" path="/var/lib/kubelet/pods/ea02500a-8808-4c52-9d16-fa7a0a1acc8d/volumes" Dec 02 15:12:23 crc kubenswrapper[4902]: I1202 15:12:23.106642 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:12:23 crc kubenswrapper[4902]: E1202 15:12:23.107214 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.825959 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:28 crc kubenswrapper[4902]: E1202 15:12:28.831433 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="extract-content" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.831463 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="extract-content" Dec 02 15:12:28 crc kubenswrapper[4902]: E1202 15:12:28.831484 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="registry-server" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.831491 4902 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="registry-server" Dec 02 15:12:28 crc kubenswrapper[4902]: E1202 15:12:28.831522 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="extract-utilities" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.831530 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="extract-utilities" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.831799 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea02500a-8808-4c52-9d16-fa7a0a1acc8d" containerName="registry-server" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.833513 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.869671 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.983258 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.983367 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx2hp\" (UniqueName: \"kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:28 crc kubenswrapper[4902]: I1202 15:12:28.983490 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.085028 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.085209 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.085293 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx2hp\" (UniqueName: \"kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.085836 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.086307 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.109550 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx2hp\" (UniqueName: \"kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp\") pod \"redhat-operators-r8krv\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.162652 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.639996 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:29 crc kubenswrapper[4902]: I1202 15:12:29.815450 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerStarted","Data":"bcd76f5e7a12346872e33c3d26c302c7f34ec059966ae01751f6b4f376e26f23"} Dec 02 15:12:30 crc kubenswrapper[4902]: I1202 15:12:30.836171 4902 generic.go:334] "Generic (PLEG): container finished" podID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerID="f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5" exitCode=0 Dec 02 15:12:30 crc kubenswrapper[4902]: I1202 15:12:30.836242 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerDied","Data":"f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5"} Dec 02 15:12:30 crc kubenswrapper[4902]: I1202 15:12:30.839720 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:12:32 crc kubenswrapper[4902]: I1202 15:12:32.866136 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerStarted","Data":"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a"} Dec 02 15:12:34 crc kubenswrapper[4902]: I1202 15:12:34.892746 4902 generic.go:334] "Generic (PLEG): container finished" podID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerID="659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a" exitCode=0 Dec 02 15:12:34 crc kubenswrapper[4902]: I1202 15:12:34.892827 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerDied","Data":"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a"} Dec 02 15:12:35 crc kubenswrapper[4902]: I1202 15:12:35.106729 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 
15:12:35 crc kubenswrapper[4902]: E1202 15:12:35.107146 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:12:35 crc kubenswrapper[4902]: I1202 15:12:35.924500 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerStarted","Data":"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210"} Dec 02 15:12:35 crc kubenswrapper[4902]: I1202 15:12:35.966167 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r8krv" podStartSLOduration=3.479281439 podStartE2EDuration="7.9661383s" podCreationTimestamp="2025-12-02 15:12:28 +0000 UTC" firstStartedPulling="2025-12-02 15:12:30.8392925 +0000 UTC m=+3382.030601219" lastFinishedPulling="2025-12-02 15:12:35.326149361 +0000 UTC m=+3386.517458080" observedRunningTime="2025-12-02 15:12:35.955727265 +0000 UTC m=+3387.147035994" watchObservedRunningTime="2025-12-02 15:12:35.9661383 +0000 UTC m=+3387.157447009" Dec 02 15:12:39 crc kubenswrapper[4902]: I1202 15:12:39.164728 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:39 crc kubenswrapper[4902]: I1202 15:12:39.166308 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:40 crc kubenswrapper[4902]: I1202 15:12:40.232369 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r8krv" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="registry-server" probeResult="failure" output=< Dec 02 15:12:40 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 15:12:40 crc kubenswrapper[4902]: > Dec 02 15:12:46 crc kubenswrapper[4902]: I1202 15:12:46.107088 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:12:46 crc kubenswrapper[4902]: E1202 15:12:46.108039 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:12:49 crc kubenswrapper[4902]: I1202 15:12:49.258858 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:49 crc kubenswrapper[4902]: I1202 15:12:49.313285 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:49 crc kubenswrapper[4902]: I1202 15:12:49.501096 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.174084 4902 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/redhat-operators-r8krv" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="registry-server" containerID="cri-o://848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210" gracePeriod=2 Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.825034 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.956443 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content\") pod \"081f098a-4603-4077-aafb-b91aa3e9bd86\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.956661 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx2hp\" (UniqueName: \"kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp\") pod \"081f098a-4603-4077-aafb-b91aa3e9bd86\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.956775 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities\") pod \"081f098a-4603-4077-aafb-b91aa3e9bd86\" (UID: \"081f098a-4603-4077-aafb-b91aa3e9bd86\") " Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.957237 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities" (OuterVolumeSpecName: "utilities") pod "081f098a-4603-4077-aafb-b91aa3e9bd86" (UID: "081f098a-4603-4077-aafb-b91aa3e9bd86"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:12:51 crc kubenswrapper[4902]: I1202 15:12:51.964991 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp" (OuterVolumeSpecName: "kube-api-access-cx2hp") pod "081f098a-4603-4077-aafb-b91aa3e9bd86" (UID: "081f098a-4603-4077-aafb-b91aa3e9bd86"). InnerVolumeSpecName "kube-api-access-cx2hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.058873 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx2hp\" (UniqueName: \"kubernetes.io/projected/081f098a-4603-4077-aafb-b91aa3e9bd86-kube-api-access-cx2hp\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.058917 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.061209 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "081f098a-4603-4077-aafb-b91aa3e9bd86" (UID: "081f098a-4603-4077-aafb-b91aa3e9bd86"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.161462 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/081f098a-4603-4077-aafb-b91aa3e9bd86-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.186092 4902 generic.go:334] "Generic (PLEG): container finished" podID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerID="848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210" exitCode=0 Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.186161 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r8krv" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.186170 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerDied","Data":"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210"} Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.186225 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r8krv" event={"ID":"081f098a-4603-4077-aafb-b91aa3e9bd86","Type":"ContainerDied","Data":"bcd76f5e7a12346872e33c3d26c302c7f34ec059966ae01751f6b4f376e26f23"} Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.186248 4902 scope.go:117] "RemoveContainer" containerID="848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.230217 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.232896 4902 scope.go:117] "RemoveContainer" containerID="659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.241509 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r8krv"] Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.265192 4902 scope.go:117] "RemoveContainer" containerID="f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.333290 4902 scope.go:117] "RemoveContainer" containerID="848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210" Dec 02 15:12:52 crc kubenswrapper[4902]: E1202 15:12:52.334523 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210\": container with ID starting with 848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210 not found: ID does not exist" containerID="848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.334555 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210"} err="failed to get container status \"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210\": rpc error: code = NotFound desc = could not find container \"848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210\": container with ID starting with 848c4a7b7306134e72b155ab9161f550374fc0ad640b0dfb0b0a30a8f8ba7210 not found: ID does not exist" Dec 02 15:12:52 crc 
kubenswrapper[4902]: I1202 15:12:52.334600 4902 scope.go:117] "RemoveContainer" containerID="659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a" Dec 02 15:12:52 crc kubenswrapper[4902]: E1202 15:12:52.334894 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a\": container with ID starting with 659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a not found: ID does not exist" containerID="659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.334958 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a"} err="failed to get container status \"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a\": rpc error: code = NotFound desc = could not find container \"659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a\": container with ID starting with 659f2d93901228e7bb6f0ea6c9e9949c3fc83ff3cb93ec066e304919b0c2634a not found: ID does not exist" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.334997 4902 scope.go:117] "RemoveContainer" containerID="f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5" Dec 02 15:12:52 crc kubenswrapper[4902]: E1202 15:12:52.335470 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5\": container with ID starting with f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5 not found: ID does not exist" containerID="f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5" Dec 02 15:12:52 crc kubenswrapper[4902]: I1202 15:12:52.335493 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5"} err="failed to get container status \"f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5\": rpc error: code = NotFound desc = could not find container \"f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5\": container with ID starting with f9f4598e74b83554fabd13952e514f5761bb2125053309dddf06845d805367a5 not found: ID does not exist" Dec 02 15:12:53 crc kubenswrapper[4902]: I1202 15:12:53.130298 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" path="/var/lib/kubelet/pods/081f098a-4603-4077-aafb-b91aa3e9bd86/volumes" Dec 02 15:12:59 crc kubenswrapper[4902]: I1202 15:12:59.116641 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:12:59 crc kubenswrapper[4902]: E1202 15:12:59.117357 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:13:14 crc kubenswrapper[4902]: I1202 15:13:14.107133 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" 
Dec 02 15:13:14 crc kubenswrapper[4902]: E1202 15:13:14.108202 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:13:27 crc kubenswrapper[4902]: I1202 15:13:27.106950 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:13:27 crc kubenswrapper[4902]: E1202 15:13:27.107848 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:13:39 crc kubenswrapper[4902]: I1202 15:13:39.106260 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:13:39 crc kubenswrapper[4902]: I1202 15:13:39.731297 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9"} Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.176400 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t"] Dec 02 15:15:00 crc kubenswrapper[4902]: E1202 15:15:00.177513 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="extract-content" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.177529 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="extract-content" Dec 02 15:15:00 crc kubenswrapper[4902]: E1202 15:15:00.177597 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="extract-utilities" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.177610 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="extract-utilities" Dec 02 15:15:00 crc kubenswrapper[4902]: E1202 15:15:00.177638 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="registry-server" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.177647 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="registry-server" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.177941 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="081f098a-4603-4077-aafb-b91aa3e9bd86" containerName="registry-server" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.178822 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.181417 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.185065 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.186469 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t"] Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.339263 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.339551 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tkrr\" (UniqueName: \"kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.339959 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.441825 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.441897 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tkrr\" (UniqueName: \"kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.441977 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.444556 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume\") pod 
\"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.451451 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.469003 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tkrr\" (UniqueName: \"kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr\") pod \"collect-profiles-29411475-ws87t\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:00 crc kubenswrapper[4902]: I1202 15:15:00.520332 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:01 crc kubenswrapper[4902]: I1202 15:15:01.003878 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t"] Dec 02 15:15:01 crc kubenswrapper[4902]: I1202 15:15:01.639358 4902 generic.go:334] "Generic (PLEG): container finished" podID="fd3522ae-b165-4ded-80f8-e2169bd7036b" containerID="873e9cbbca9e4d718b2424297d498ab66d126a441e0edd798bc976d15f747819" exitCode=0 Dec 02 15:15:01 crc kubenswrapper[4902]: I1202 15:15:01.639457 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" event={"ID":"fd3522ae-b165-4ded-80f8-e2169bd7036b","Type":"ContainerDied","Data":"873e9cbbca9e4d718b2424297d498ab66d126a441e0edd798bc976d15f747819"} Dec 02 15:15:01 crc kubenswrapper[4902]: I1202 15:15:01.639914 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" event={"ID":"fd3522ae-b165-4ded-80f8-e2169bd7036b","Type":"ContainerStarted","Data":"4dba98f1c558e88e174f52c019939cd48b8a20fae3656559290ea1cfaea451f1"} Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.086085 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.220608 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tkrr\" (UniqueName: \"kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr\") pod \"fd3522ae-b165-4ded-80f8-e2169bd7036b\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.220695 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume\") pod \"fd3522ae-b165-4ded-80f8-e2169bd7036b\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.220756 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume\") pod \"fd3522ae-b165-4ded-80f8-e2169bd7036b\" (UID: \"fd3522ae-b165-4ded-80f8-e2169bd7036b\") " Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.223190 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume" (OuterVolumeSpecName: "config-volume") pod "fd3522ae-b165-4ded-80f8-e2169bd7036b" (UID: "fd3522ae-b165-4ded-80f8-e2169bd7036b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.226594 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fd3522ae-b165-4ded-80f8-e2169bd7036b" (UID: "fd3522ae-b165-4ded-80f8-e2169bd7036b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.228544 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr" (OuterVolumeSpecName: "kube-api-access-8tkrr") pod "fd3522ae-b165-4ded-80f8-e2169bd7036b" (UID: "fd3522ae-b165-4ded-80f8-e2169bd7036b"). InnerVolumeSpecName "kube-api-access-8tkrr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.323132 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fd3522ae-b165-4ded-80f8-e2169bd7036b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.323165 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fd3522ae-b165-4ded-80f8-e2169bd7036b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.323175 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tkrr\" (UniqueName: \"kubernetes.io/projected/fd3522ae-b165-4ded-80f8-e2169bd7036b-kube-api-access-8tkrr\") on node \"crc\" DevicePath \"\"" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.665797 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" event={"ID":"fd3522ae-b165-4ded-80f8-e2169bd7036b","Type":"ContainerDied","Data":"4dba98f1c558e88e174f52c019939cd48b8a20fae3656559290ea1cfaea451f1"} Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.666390 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dba98f1c558e88e174f52c019939cd48b8a20fae3656559290ea1cfaea451f1" Dec 02 15:15:03 crc kubenswrapper[4902]: I1202 15:15:03.665896 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411475-ws87t" Dec 02 15:15:04 crc kubenswrapper[4902]: I1202 15:15:04.162772 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"] Dec 02 15:15:04 crc kubenswrapper[4902]: I1202 15:15:04.171882 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411430-dx2wb"] Dec 02 15:15:05 crc kubenswrapper[4902]: I1202 15:15:05.131152 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6df3b0b6-51e8-426d-a1e5-b03611c256bb" path="/var/lib/kubelet/pods/6df3b0b6-51e8-426d-a1e5-b03611c256bb/volumes" Dec 02 15:15:33 crc kubenswrapper[4902]: I1202 15:15:33.437809 4902 scope.go:117] "RemoveContainer" containerID="0c2562be63138dd01b943189bdd0c76333b7ec8b843339f727a7a38defe258b7" Dec 02 15:16:04 crc kubenswrapper[4902]: I1202 15:16:04.731241 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:16:04 crc kubenswrapper[4902]: I1202 15:16:04.731938 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:16:34 crc kubenswrapper[4902]: I1202 15:16:34.732430 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 02 15:16:34 crc kubenswrapper[4902]: I1202 15:16:34.733255 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:17:04 crc kubenswrapper[4902]: I1202 15:17:04.732073 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:17:04 crc kubenswrapper[4902]: I1202 15:17:04.732927 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:17:04 crc kubenswrapper[4902]: I1202 15:17:04.733010 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:17:04 crc kubenswrapper[4902]: I1202 15:17:04.734281 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:17:04 crc kubenswrapper[4902]: I1202 15:17:04.734391 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9" gracePeriod=600 Dec 02 15:17:05 crc kubenswrapper[4902]: I1202 15:17:05.129107 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9" exitCode=0 Dec 02 15:17:05 crc kubenswrapper[4902]: I1202 15:17:05.131145 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9"} Dec 02 15:17:05 crc kubenswrapper[4902]: I1202 15:17:05.131189 4902 scope.go:117] "RemoveContainer" containerID="97db1b35891f9b3b66146e2ed2da4d82abfc43379fdc8907bfc591f03732ec8b" Dec 02 15:17:06 crc kubenswrapper[4902]: I1202 15:17:06.152888 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"} Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.894737 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gmbxf"] Dec 02 15:17:55 crc kubenswrapper[4902]: E1202 15:17:55.895803 4902 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="fd3522ae-b165-4ded-80f8-e2169bd7036b" containerName="collect-profiles" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.895821 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3522ae-b165-4ded-80f8-e2169bd7036b" containerName="collect-profiles" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.896135 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd3522ae-b165-4ded-80f8-e2169bd7036b" containerName="collect-profiles" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.897946 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.912242 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gmbxf"] Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.959798 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.959844 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:55 crc kubenswrapper[4902]: I1202 15:17:55.960274 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq7f4\" (UniqueName: \"kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.062426 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.062474 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.062613 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq7f4\" (UniqueName: \"kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.063872 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.063881 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.084075 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq7f4\" (UniqueName: \"kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4\") pod \"community-operators-gmbxf\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.250119 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:17:56 crc kubenswrapper[4902]: I1202 15:17:56.874059 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gmbxf"] Dec 02 15:17:57 crc kubenswrapper[4902]: I1202 15:17:57.716835 4902 generic.go:334] "Generic (PLEG): container finished" podID="809e3427-7725-440c-ba92-654731242f3a" containerID="7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4" exitCode=0 Dec 02 15:17:57 crc kubenswrapper[4902]: I1202 15:17:57.717152 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerDied","Data":"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4"} Dec 02 15:17:57 crc kubenswrapper[4902]: I1202 15:17:57.717184 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerStarted","Data":"dee1c1309d107f56cad6dd7290ac9953daf0d37c83536862a442948b996edc63"} Dec 02 15:17:57 crc kubenswrapper[4902]: I1202 15:17:57.719921 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:17:58 crc kubenswrapper[4902]: I1202 15:17:58.735255 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerStarted","Data":"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"} Dec 02 15:18:00 crc kubenswrapper[4902]: I1202 15:18:00.762100 4902 generic.go:334] "Generic (PLEG): container finished" podID="809e3427-7725-440c-ba92-654731242f3a" containerID="6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f" exitCode=0 Dec 02 15:18:00 crc kubenswrapper[4902]: I1202 15:18:00.762208 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerDied","Data":"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"} Dec 02 15:18:01 crc kubenswrapper[4902]: I1202 15:18:01.776109 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" 
event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerStarted","Data":"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e"} Dec 02 15:18:01 crc kubenswrapper[4902]: I1202 15:18:01.805900 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gmbxf" podStartSLOduration=3.183116429 podStartE2EDuration="6.805878432s" podCreationTimestamp="2025-12-02 15:17:55 +0000 UTC" firstStartedPulling="2025-12-02 15:17:57.719660598 +0000 UTC m=+3708.910969327" lastFinishedPulling="2025-12-02 15:18:01.342422621 +0000 UTC m=+3712.533731330" observedRunningTime="2025-12-02 15:18:01.793613664 +0000 UTC m=+3712.984922383" watchObservedRunningTime="2025-12-02 15:18:01.805878432 +0000 UTC m=+3712.997187141" Dec 02 15:18:06 crc kubenswrapper[4902]: I1202 15:18:06.250917 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:18:06 crc kubenswrapper[4902]: I1202 15:18:06.251525 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:18:06 crc kubenswrapper[4902]: I1202 15:18:06.316484 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:18:06 crc kubenswrapper[4902]: I1202 15:18:06.913803 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:18:06 crc kubenswrapper[4902]: I1202 15:18:06.970768 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gmbxf"] Dec 02 15:18:08 crc kubenswrapper[4902]: I1202 15:18:08.856041 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gmbxf" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="registry-server" containerID="cri-o://fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e" gracePeriod=2 Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.376369 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gmbxf" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.449462 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities\") pod \"809e3427-7725-440c-ba92-654731242f3a\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.449579 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content\") pod \"809e3427-7725-440c-ba92-654731242f3a\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.449670 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq7f4\" (UniqueName: \"kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4\") pod \"809e3427-7725-440c-ba92-654731242f3a\" (UID: \"809e3427-7725-440c-ba92-654731242f3a\") " Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.451356 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities" (OuterVolumeSpecName: "utilities") pod "809e3427-7725-440c-ba92-654731242f3a" (UID: "809e3427-7725-440c-ba92-654731242f3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.462769 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4" (OuterVolumeSpecName: "kube-api-access-fq7f4") pod "809e3427-7725-440c-ba92-654731242f3a" (UID: "809e3427-7725-440c-ba92-654731242f3a"). InnerVolumeSpecName "kube-api-access-fq7f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.497738 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "809e3427-7725-440c-ba92-654731242f3a" (UID: "809e3427-7725-440c-ba92-654731242f3a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.552624 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.552684 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809e3427-7725-440c-ba92-654731242f3a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.552711 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq7f4\" (UniqueName: \"kubernetes.io/projected/809e3427-7725-440c-ba92-654731242f3a-kube-api-access-fq7f4\") on node \"crc\" DevicePath \"\"" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.884184 4902 generic.go:334] "Generic (PLEG): container finished" podID="809e3427-7725-440c-ba92-654731242f3a" containerID="fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e" exitCode=0 Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.884532 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerDied","Data":"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e"} Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.884631 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gmbxf" event={"ID":"809e3427-7725-440c-ba92-654731242f3a","Type":"ContainerDied","Data":"dee1c1309d107f56cad6dd7290ac9953daf0d37c83536862a442948b996edc63"} Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.884664 4902 scope.go:117] "RemoveContainer" containerID="fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e" Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.884896 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gmbxf"
Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.936820 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gmbxf"]
Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.948141 4902 scope.go:117] "RemoveContainer" containerID="6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"
Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.952966 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gmbxf"]
Dec 02 15:18:09 crc kubenswrapper[4902]: I1202 15:18:09.978829 4902 scope.go:117] "RemoveContainer" containerID="7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4"
Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.034463 4902 scope.go:117] "RemoveContainer" containerID="fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e"
Dec 02 15:18:10 crc kubenswrapper[4902]: E1202 15:18:10.035045 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e\": container with ID starting with fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e not found: ID does not exist" containerID="fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e"
Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.035107 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e"} err="failed to get container status \"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e\": rpc error: code = NotFound desc = could not find container \"fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e\": container with ID starting with fd1d2a4d0d247679e3e4e6e22c36ac7f95b4dde5560d304a66c279e005496b7e not found: ID does not exist"
Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.035135 4902 scope.go:117] "RemoveContainer" containerID="6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"
Dec 02 15:18:10 crc kubenswrapper[4902]: E1202 15:18:10.035807 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f\": container with ID starting with 6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f not found: ID does not exist" containerID="6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"
Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.035836 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f"} err="failed to get container status \"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f\": rpc error: code = NotFound desc = could not find container \"6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f\": container with ID starting with 6d241492a464895b782cc70dbeeed6808e0b44567fdfaebe43555dd96cbe063f not found: ID does not exist"
Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.035853 4902 scope.go:117] "RemoveContainer" containerID="7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4"
Dec 02 15:18:10 crc kubenswrapper[4902]: E1202 15:18:10.036156 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4\": container with ID starting with 7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4 not found: ID does not exist" containerID="7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4"
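The NotFound errors in this stretch are a benign race: the kubelet retries removal of containers CRI-O has already deleted, and the runtime answers with gRPC code NotFound. A sketch, using the standard gRPC status package, of classifying such an error by code rather than by message text; isBenignRemoveError is a hypothetical helper name and the message is abbreviated from the log:

// notfound_sketch.go -- classifying the CRI "could not find container"
// errors above. CRI is gRPC-based, so the robust check is on the status
// code, not the error string.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isBenignRemoveError (hypothetical helper): a container that is already
// gone counts as successfully removed for cleanup purposes.
func isBenignRemoveError(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	err := status.Error(codes.NotFound, "could not find container \"fd1d2a4d...\": ID does not exist")
	fmt.Println(isBenignRemoveError(err)) // true
}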
failed" err="rpc error: code = NotFound desc = could not find container \"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4\": container with ID starting with 7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4 not found: ID does not exist" containerID="7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4" Dec 02 15:18:10 crc kubenswrapper[4902]: I1202 15:18:10.036185 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4"} err="failed to get container status \"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4\": rpc error: code = NotFound desc = could not find container \"7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4\": container with ID starting with 7223eeb5a2777e0a6cf6631a11529065055a47867f68a585044b1ec7e0959ef4 not found: ID does not exist" Dec 02 15:18:11 crc kubenswrapper[4902]: I1202 15:18:11.117064 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="809e3427-7725-440c-ba92-654731242f3a" path="/var/lib/kubelet/pods/809e3427-7725-440c-ba92-654731242f3a/volumes" Dec 02 15:19:34 crc kubenswrapper[4902]: I1202 15:19:34.731505 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:19:34 crc kubenswrapper[4902]: I1202 15:19:34.732101 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:20:04 crc kubenswrapper[4902]: I1202 15:20:04.732193 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:20:04 crc kubenswrapper[4902]: I1202 15:20:04.732874 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:20:34 crc kubenswrapper[4902]: I1202 15:20:34.731732 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:20:34 crc kubenswrapper[4902]: I1202 15:20:34.732764 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:20:34 crc kubenswrapper[4902]: I1202 15:20:34.732848 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:20:34 crc kubenswrapper[4902]: I1202 15:20:34.734245 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:20:34 crc kubenswrapper[4902]: I1202 15:20:34.734323 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" gracePeriod=600 Dec 02 15:20:34 crc kubenswrapper[4902]: E1202 15:20:34.870436 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:20:35 crc kubenswrapper[4902]: I1202 15:20:35.661525 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" exitCode=0 Dec 02 15:20:35 crc kubenswrapper[4902]: I1202 15:20:35.661641 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"} Dec 02 15:20:35 crc kubenswrapper[4902]: I1202 15:20:35.661998 4902 scope.go:117] "RemoveContainer" containerID="1b578866bd1e33784f7438ab2d7a99bd871df481e636cf8d3653d5e78e867aa9" Dec 02 15:20:35 crc kubenswrapper[4902]: I1202 15:20:35.663064 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:20:35 crc kubenswrapper[4902]: E1202 15:20:35.663685 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:20:51 crc kubenswrapper[4902]: I1202 15:20:51.107158 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:20:51 crc kubenswrapper[4902]: E1202 15:20:51.108650 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:21:05 crc 
Dec 02 15:21:05 crc kubenswrapper[4902]: E1202 15:21:05.109126 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:21:19 crc kubenswrapper[4902]: I1202 15:21:19.112295 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:21:19 crc kubenswrapper[4902]: E1202 15:21:19.113104 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:21:30 crc kubenswrapper[4902]: I1202 15:21:30.107377 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:21:30 crc kubenswrapper[4902]: E1202 15:21:30.108292 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:21:41 crc kubenswrapper[4902]: I1202 15:21:41.107433 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:21:41 crc kubenswrapper[4902]: E1202 15:21:41.108204 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:21:56 crc kubenswrapper[4902]: I1202 15:21:56.107193 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:21:56 crc kubenswrapper[4902]: E1202 15:21:56.108135 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:22:08 crc kubenswrapper[4902]: I1202 15:22:08.107861 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:22:08 crc kubenswrapper[4902]: E1202 15:22:08.108836 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:22:20 crc kubenswrapper[4902]: I1202 15:22:20.107156 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:22:20 crc kubenswrapper[4902]: E1202 15:22:20.108493 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:22:33 crc kubenswrapper[4902]: I1202 15:22:33.108323 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:22:33 crc kubenswrapper[4902]: E1202 15:22:33.109382 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:22:48 crc kubenswrapper[4902]: I1202 15:22:48.107558 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:22:48 crc kubenswrapper[4902]: E1202 15:22:48.108840 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.977360 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"]
Dec 02 15:23:00 crc kubenswrapper[4902]: E1202 15:23:00.978328 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="extract-utilities"
Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.978342 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="extract-utilities"
Dec 02 15:23:00 crc kubenswrapper[4902]: E1202 15:23:00.978361 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="extract-content"
Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.978368 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="extract-content"
Dec 02 15:23:00 crc kubenswrapper[4902]: E1202 15:23:00.978384 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="registry-server"
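Runs like this interleave several pods, and the per-pod story is easier to follow once reduced to the "SyncLoop (PLEG)" container events. A small stdlib Go sketch that pulls ContainerStarted/ContainerDied events out of a kubelet log in this format; the file path is a placeholder and the regexp is tailored to the entries above, so other kubelet versions may need adjustments:

// pleg_events_sketch.go -- extract PLEG container events from a kubelet log.
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"regexp"
)

var plegRe = regexp.MustCompile(`pod="([^"]+)" event={"ID":"([^"]+)","Type":"(ContainerStarted|ContainerDied)","Data":"([0-9a-f]+)"}`)

func main() {
	f, err := os.Open("kubelet.log") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // journal entries can be very long
	for sc.Scan() {
		if m := plegRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%s %s %.12s\n", m[1], m[3], m[4]) // pod, event type, short container ID
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
}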
"RemoveStaleState: removing container" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="registry-server" Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.978392 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="registry-server" Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.978600 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="809e3427-7725-440c-ba92-654731242f3a" containerName="registry-server" Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.980012 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:00 crc kubenswrapper[4902]: I1202 15:23:00.990937 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"] Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.089435 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.089682 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.089739 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7pj9\" (UniqueName: \"kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.191234 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.191425 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.191480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7pj9\" (UniqueName: \"kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.192261 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities\") pod 
\"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.192346 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.430365 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7pj9\" (UniqueName: \"kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9\") pod \"redhat-marketplace-l9zdq\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") " pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:01 crc kubenswrapper[4902]: I1202 15:23:01.599523 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:02 crc kubenswrapper[4902]: I1202 15:23:02.565589 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"] Dec 02 15:23:03 crc kubenswrapper[4902]: I1202 15:23:03.108130 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:23:03 crc kubenswrapper[4902]: E1202 15:23:03.108790 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:23:03 crc kubenswrapper[4902]: I1202 15:23:03.243744 4902 generic.go:334] "Generic (PLEG): container finished" podID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerID="d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e" exitCode=0 Dec 02 15:23:03 crc kubenswrapper[4902]: I1202 15:23:03.243808 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerDied","Data":"d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e"} Dec 02 15:23:03 crc kubenswrapper[4902]: I1202 15:23:03.243853 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerStarted","Data":"7474f14a2a5408cc144cc6b633036b796547b00b4d4b10bbf2cf5ffbd0ad385c"} Dec 02 15:23:03 crc kubenswrapper[4902]: I1202 15:23:03.246490 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:23:05 crc kubenswrapper[4902]: I1202 15:23:05.262824 4902 generic.go:334] "Generic (PLEG): container finished" podID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerID="43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974" exitCode=0 Dec 02 15:23:05 crc kubenswrapper[4902]: I1202 15:23:05.262863 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" 
event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerDied","Data":"43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974"} Dec 02 15:23:06 crc kubenswrapper[4902]: I1202 15:23:06.294152 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerStarted","Data":"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496"} Dec 02 15:23:06 crc kubenswrapper[4902]: I1202 15:23:06.314108 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l9zdq" podStartSLOduration=3.760192606 podStartE2EDuration="6.314087608s" podCreationTimestamp="2025-12-02 15:23:00 +0000 UTC" firstStartedPulling="2025-12-02 15:23:03.246064872 +0000 UTC m=+4014.437373601" lastFinishedPulling="2025-12-02 15:23:05.799959884 +0000 UTC m=+4016.991268603" observedRunningTime="2025-12-02 15:23:06.313415909 +0000 UTC m=+4017.504724618" watchObservedRunningTime="2025-12-02 15:23:06.314087608 +0000 UTC m=+4017.505396327" Dec 02 15:23:11 crc kubenswrapper[4902]: I1202 15:23:11.600186 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:11 crc kubenswrapper[4902]: I1202 15:23:11.601837 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:11 crc kubenswrapper[4902]: I1202 15:23:11.685325 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:12 crc kubenswrapper[4902]: I1202 15:23:12.425521 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:12 crc kubenswrapper[4902]: I1202 15:23:12.482314 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"] Dec 02 15:23:14 crc kubenswrapper[4902]: I1202 15:23:14.373109 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l9zdq" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server" containerID="cri-o://759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496" gracePeriod=2 Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.042769 4902 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.239096 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities\") pod \"d4ffe681-7c06-448d-b01d-f105fca1265c\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") "
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.239257 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content\") pod \"d4ffe681-7c06-448d-b01d-f105fca1265c\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") "
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.239376 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7pj9\" (UniqueName: \"kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9\") pod \"d4ffe681-7c06-448d-b01d-f105fca1265c\" (UID: \"d4ffe681-7c06-448d-b01d-f105fca1265c\") "
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.240414 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities" (OuterVolumeSpecName: "utilities") pod "d4ffe681-7c06-448d-b01d-f105fca1265c" (UID: "d4ffe681-7c06-448d-b01d-f105fca1265c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.246203 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9" (OuterVolumeSpecName: "kube-api-access-d7pj9") pod "d4ffe681-7c06-448d-b01d-f105fca1265c" (UID: "d4ffe681-7c06-448d-b01d-f105fca1265c"). InnerVolumeSpecName "kube-api-access-d7pj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.269419 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4ffe681-7c06-448d-b01d-f105fca1265c" (UID: "d4ffe681-7c06-448d-b01d-f105fca1265c"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.342385 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.342416 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4ffe681-7c06-448d-b01d-f105fca1265c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.342437 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7pj9\" (UniqueName: \"kubernetes.io/projected/d4ffe681-7c06-448d-b01d-f105fca1265c-kube-api-access-d7pj9\") on node \"crc\" DevicePath \"\"" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.391007 4902 generic.go:334] "Generic (PLEG): container finished" podID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerID="759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496" exitCode=0 Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.391046 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerDied","Data":"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496"} Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.391073 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l9zdq" event={"ID":"d4ffe681-7c06-448d-b01d-f105fca1265c","Type":"ContainerDied","Data":"7474f14a2a5408cc144cc6b633036b796547b00b4d4b10bbf2cf5ffbd0ad385c"} Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.391089 4902 scope.go:117] "RemoveContainer" containerID="759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.393243 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l9zdq" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.429963 4902 scope.go:117] "RemoveContainer" containerID="43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.452851 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"] Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.470345 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l9zdq"] Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.483189 4902 scope.go:117] "RemoveContainer" containerID="d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.535736 4902 scope.go:117] "RemoveContainer" containerID="759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496" Dec 02 15:23:15 crc kubenswrapper[4902]: E1202 15:23:15.536295 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496\": container with ID starting with 759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496 not found: ID does not exist" containerID="759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.536347 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496"} err="failed to get container status \"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496\": rpc error: code = NotFound desc = could not find container \"759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496\": container with ID starting with 759d44a00750ace5e112c9cd606096970199454fe9c25acbf76759d3f5908496 not found: ID does not exist" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.536382 4902 scope.go:117] "RemoveContainer" containerID="43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974" Dec 02 15:23:15 crc kubenswrapper[4902]: E1202 15:23:15.536746 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974\": container with ID starting with 43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974 not found: ID does not exist" containerID="43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.536774 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974"} err="failed to get container status \"43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974\": rpc error: code = NotFound desc = could not find container \"43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974\": container with ID starting with 43ae4598be8253e05ca8132a63c5174f75cf90388471c3e2e90bdac5e06c1974 not found: ID does not exist" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.536793 4902 scope.go:117] "RemoveContainer" containerID="d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e" Dec 02 15:23:15 crc kubenswrapper[4902]: E1202 15:23:15.537072 4902 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e\": container with ID starting with d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e not found: ID does not exist" containerID="d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e" Dec 02 15:23:15 crc kubenswrapper[4902]: I1202 15:23:15.537098 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e"} err="failed to get container status \"d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e\": rpc error: code = NotFound desc = could not find container \"d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e\": container with ID starting with d79e21e3fb7d2d3dab327bbf73b697f6a5fb4b16d0ca87164dc5ca865e34f52e not found: ID does not exist" Dec 02 15:23:17 crc kubenswrapper[4902]: I1202 15:23:17.118745 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" path="/var/lib/kubelet/pods/d4ffe681-7c06-448d-b01d-f105fca1265c/volumes" Dec 02 15:23:18 crc kubenswrapper[4902]: I1202 15:23:18.107612 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:23:18 crc kubenswrapper[4902]: E1202 15:23:18.108952 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:23:29 crc kubenswrapper[4902]: I1202 15:23:29.132250 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:23:29 crc kubenswrapper[4902]: E1202 15:23:29.133650 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:23:40 crc kubenswrapper[4902]: I1202 15:23:40.107277 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:23:40 crc kubenswrapper[4902]: E1202 15:23:40.108198 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:23:54 crc kubenswrapper[4902]: I1202 15:23:54.107274 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:23:54 crc kubenswrapper[4902]: E1202 15:23:54.110489 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:24:05 crc kubenswrapper[4902]: I1202 15:24:05.106703 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:24:05 crc kubenswrapper[4902]: E1202 15:24:05.107552 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:24:19 crc kubenswrapper[4902]: I1202 15:24:19.122964 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:24:19 crc kubenswrapper[4902]: E1202 15:24:19.123677 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:24:31 crc kubenswrapper[4902]: I1202 15:24:31.107702 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:24:31 crc kubenswrapper[4902]: E1202 15:24:31.108509 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:24:46 crc kubenswrapper[4902]: I1202 15:24:46.108227 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:24:46 crc kubenswrapper[4902]: E1202 15:24:46.109525 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:25:00 crc kubenswrapper[4902]: I1202 15:25:00.107731 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:25:00 crc kubenswrapper[4902]: E1202 15:25:00.108876 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:25:12 crc kubenswrapper[4902]: I1202 15:25:12.107208 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:25:12 crc kubenswrapper[4902]: E1202 15:25:12.108091 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.318422 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"] Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319751 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-content" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319773 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-content" Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319800 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-utilities" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319813 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-utilities" Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319842 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319855 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.320222 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server" Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.322937 4902 util.go:30] "No sandbox for pod can be found. 
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.318422 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"]
Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319751 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-content"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319773 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-content"
Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319800 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-utilities"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319813 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="extract-utilities"
Dec 02 15:25:18 crc kubenswrapper[4902]: E1202 15:25:18.319842 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.319855 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.320222 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ffe681-7c06-448d-b01d-f105fca1265c" containerName="registry-server"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.322937 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.364829 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"]
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.460760 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.460849 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.461533 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mp85\" (UniqueName: \"kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.562912 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mp85\" (UniqueName: \"kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.562985 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.563065 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.563614 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.564441 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.586833 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mp85\" (UniqueName: \"kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85\") pod \"redhat-operators-ktv4s\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") " pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:18 crc kubenswrapper[4902]: I1202 15:25:18.657254 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:19 crc kubenswrapper[4902]: I1202 15:25:19.129707 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"]
Dec 02 15:25:19 crc kubenswrapper[4902]: I1202 15:25:19.775406 4902 generic.go:334] "Generic (PLEG): container finished" podID="ac72e289-8af7-4534-a325-fddddca513cc" containerID="aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9" exitCode=0
Dec 02 15:25:19 crc kubenswrapper[4902]: I1202 15:25:19.775503 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerDied","Data":"aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9"}
Dec 02 15:25:19 crc kubenswrapper[4902]: I1202 15:25:19.776724 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerStarted","Data":"a5a5b63ee2b1e4b8f2a255ece6e5131f08a8e9883826556f09a25f54dfbb449e"}
Dec 02 15:25:25 crc kubenswrapper[4902]: I1202 15:25:25.107133 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:25:25 crc kubenswrapper[4902]: E1202 15:25:25.109261 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:25:29 crc kubenswrapper[4902]: I1202 15:25:29.885115 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerStarted","Data":"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"}
Dec 02 15:25:32 crc kubenswrapper[4902]: I1202 15:25:32.916826 4902 generic.go:334] "Generic (PLEG): container finished" podID="ac72e289-8af7-4534-a325-fddddca513cc" containerID="44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91" exitCode=0
Dec 02 15:25:32 crc kubenswrapper[4902]: I1202 15:25:32.916916 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerDied","Data":"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"}
Dec 02 15:25:34 crc kubenswrapper[4902]: I1202 15:25:34.945090 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerStarted","Data":"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"}
pod="openshift-marketplace/redhat-operators-ktv4s" podStartSLOduration=3.227057778 podStartE2EDuration="16.983463558s" podCreationTimestamp="2025-12-02 15:25:18 +0000 UTC" firstStartedPulling="2025-12-02 15:25:19.777504157 +0000 UTC m=+4150.968812856" lastFinishedPulling="2025-12-02 15:25:33.533909927 +0000 UTC m=+4164.725218636" observedRunningTime="2025-12-02 15:25:34.970734256 +0000 UTC m=+4166.162042985" watchObservedRunningTime="2025-12-02 15:25:34.983463558 +0000 UTC m=+4166.174772267" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.747618 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-msjkd"] Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.750094 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.762182 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-msjkd"] Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.834785 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvf6c\" (UniqueName: \"kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.834852 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.834931 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.936838 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.937302 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.937432 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvf6c\" (UniqueName: \"kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.938236 4902 
Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.938236 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.938383 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:36 crc kubenswrapper[4902]: I1202 15:25:36.957471 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvf6c\" (UniqueName: \"kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c\") pod \"certified-operators-msjkd\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.096541 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.109439 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c"
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.443072 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-msjkd"]
Dec 02 15:25:37 crc kubenswrapper[4902]: W1202 15:25:37.461817 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d58e699_c09f_4bb3_8d0f_ea441c8d2fb9.slice/crio-810a44033b930c94a86ba5e7478dc6d7b3813fc5516f92b66ae33fe3036a7208 WatchSource:0}: Error finding container 810a44033b930c94a86ba5e7478dc6d7b3813fc5516f92b66ae33fe3036a7208: Status 404 returned error can't find the container with id 810a44033b930c94a86ba5e7478dc6d7b3813fc5516f92b66ae33fe3036a7208
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.980201 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653"}
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.982691 4902 generic.go:334] "Generic (PLEG): container finished" podID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerID="b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e" exitCode=0
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.982746 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerDied","Data":"b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e"}
Dec 02 15:25:37 crc kubenswrapper[4902]: I1202 15:25:37.982779 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerStarted","Data":"810a44033b930c94a86ba5e7478dc6d7b3813fc5516f92b66ae33fe3036a7208"}
Dec 02 15:25:38 crc kubenswrapper[4902]: I1202 15:25:38.657498 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:38 crc kubenswrapper[4902]: I1202 15:25:38.657925 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:25:38 crc kubenswrapper[4902]: I1202 15:25:38.994214 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerStarted","Data":"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd"}
Dec 02 15:25:39 crc kubenswrapper[4902]: I1202 15:25:39.701054 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ktv4s" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="registry-server" probeResult="failure" output=<
Dec 02 15:25:39 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s
Dec 02 15:25:39 crc kubenswrapper[4902]: >
Dec 02 15:25:40 crc kubenswrapper[4902]: I1202 15:25:40.006399 4902 generic.go:334] "Generic (PLEG): container finished" podID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerID="f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd" exitCode=0
Dec 02 15:25:40 crc kubenswrapper[4902]: I1202 15:25:40.006656 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerDied","Data":"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd"}
Dec 02 15:25:41 crc kubenswrapper[4902]: I1202 15:25:41.016996 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerStarted","Data":"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee"}
Dec 02 15:25:41 crc kubenswrapper[4902]: I1202 15:25:41.042101 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-msjkd" podStartSLOduration=2.363390448 podStartE2EDuration="5.042085211s" podCreationTimestamp="2025-12-02 15:25:36 +0000 UTC" firstStartedPulling="2025-12-02 15:25:37.984906343 +0000 UTC m=+4169.176215052" lastFinishedPulling="2025-12-02 15:25:40.663601106 +0000 UTC m=+4171.854909815" observedRunningTime="2025-12-02 15:25:41.036895014 +0000 UTC m=+4172.228203723" watchObservedRunningTime="2025-12-02 15:25:41.042085211 +0000 UTC m=+4172.233393920"
Dec 02 15:25:47 crc kubenswrapper[4902]: I1202 15:25:47.136201 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:47 crc kubenswrapper[4902]: I1202 15:25:47.156351 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:47 crc kubenswrapper[4902]: I1202 15:25:47.218927 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:48 crc kubenswrapper[4902]: I1202 15:25:48.234187 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-msjkd"
Dec 02 15:25:48 crc kubenswrapper[4902]: I1202 15:25:48.706278 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ktv4s"
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-msjkd"] Dec 02 15:25:48 crc kubenswrapper[4902]: I1202 15:25:48.782543 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ktv4s" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.180152 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-msjkd" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="registry-server" containerID="cri-o://d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee" gracePeriod=2 Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.185305 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"] Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.544811 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"] Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.545363 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p8dbc" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="registry-server" containerID="cri-o://2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980" gracePeriod=2 Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.815323 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.831684 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities\") pod \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.831911 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content\") pod \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.832143 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvf6c\" (UniqueName: \"kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c\") pod \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\" (UID: \"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9\") " Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.835243 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities" (OuterVolumeSpecName: "utilities") pod "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" (UID: "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.848539 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c" (OuterVolumeSpecName: "kube-api-access-jvf6c") pod "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" (UID: "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9"). InnerVolumeSpecName "kube-api-access-jvf6c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:25:50 crc kubenswrapper[4902]: E1202 15:25:50.865605 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb24e9b8_9890_479a_9248_e978a02e6823.slice/crio-2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb24e9b8_9890_479a_9248_e978a02e6823.slice/crio-conmon-2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980.scope\": RecentStats: unable to find data in memory cache]" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.890017 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" (UID: "9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.935457 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvf6c\" (UniqueName: \"kubernetes.io/projected/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-kube-api-access-jvf6c\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.935667 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:50 crc kubenswrapper[4902]: I1202 15:25:50.935729 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.075023 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.143066 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities\") pod \"cb24e9b8-9890-479a-9248-e978a02e6823\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.143313 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssh45\" (UniqueName: \"kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45\") pod \"cb24e9b8-9890-479a-9248-e978a02e6823\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.143360 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content\") pod \"cb24e9b8-9890-479a-9248-e978a02e6823\" (UID: \"cb24e9b8-9890-479a-9248-e978a02e6823\") " Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.144515 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities" (OuterVolumeSpecName: "utilities") pod "cb24e9b8-9890-479a-9248-e978a02e6823" (UID: "cb24e9b8-9890-479a-9248-e978a02e6823"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.148354 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.155805 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45" (OuterVolumeSpecName: "kube-api-access-ssh45") pod "cb24e9b8-9890-479a-9248-e978a02e6823" (UID: "cb24e9b8-9890-479a-9248-e978a02e6823"). InnerVolumeSpecName "kube-api-access-ssh45". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.203381 4902 generic.go:334] "Generic (PLEG): container finished" podID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerID="d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee" exitCode=0 Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.206251 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-msjkd" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.207988 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerDied","Data":"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee"} Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.208067 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-msjkd" event={"ID":"9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9","Type":"ContainerDied","Data":"810a44033b930c94a86ba5e7478dc6d7b3813fc5516f92b66ae33fe3036a7208"} Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.208090 4902 scope.go:117] "RemoveContainer" containerID="d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.214554 4902 generic.go:334] "Generic (PLEG): container finished" podID="cb24e9b8-9890-479a-9248-e978a02e6823" containerID="2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980" exitCode=0 Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.214621 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerDied","Data":"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980"} Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.214657 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8dbc" event={"ID":"cb24e9b8-9890-479a-9248-e978a02e6823","Type":"ContainerDied","Data":"472c37555383ef250db2ada069edf49e16bb7990e2593631779dba030f5b3173"} Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.214733 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8dbc" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.234982 4902 scope.go:117] "RemoveContainer" containerID="f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.251371 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssh45\" (UniqueName: \"kubernetes.io/projected/cb24e9b8-9890-479a-9248-e978a02e6823-kube-api-access-ssh45\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.252683 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-msjkd"] Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.263145 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-msjkd"] Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.271758 4902 scope.go:117] "RemoveContainer" containerID="b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.292069 4902 scope.go:117] "RemoveContainer" containerID="d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.292448 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee\": container with ID starting with d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee not found: ID does not exist" containerID="d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.292481 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee"} err="failed to get container status \"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee\": rpc error: code = NotFound desc = could not find container \"d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee\": container with ID starting with d0b4bc2f9a88d9efa4273f16fc7ac9493d8de4062552ad9567310468df34c3ee not found: ID does not exist" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.292505 4902 scope.go:117] "RemoveContainer" containerID="f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.293315 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd\": container with ID starting with f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd not found: ID does not exist" containerID="f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.293340 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd"} err="failed to get container status \"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd\": rpc error: code = NotFound desc = could not find container \"f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd\": container with ID starting with f1f0097adcbf111064f92485988f69c54eb12b22a600a6977c53f3cc965ca7bd not found: ID does not exist" Dec 02 15:25:51 
crc kubenswrapper[4902]: I1202 15:25:51.293355 4902 scope.go:117] "RemoveContainer" containerID="b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.293570 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e\": container with ID starting with b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e not found: ID does not exist" containerID="b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.293594 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e"} err="failed to get container status \"b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e\": rpc error: code = NotFound desc = could not find container \"b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e\": container with ID starting with b2b58c8e5dacead5f7ef0fb4a7cf552132467686c98a115db68e3aa713e9184e not found: ID does not exist" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.293610 4902 scope.go:117] "RemoveContainer" containerID="2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.330403 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb24e9b8-9890-479a-9248-e978a02e6823" (UID: "cb24e9b8-9890-479a-9248-e978a02e6823"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.353440 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb24e9b8-9890-479a-9248-e978a02e6823-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.368124 4902 scope.go:117] "RemoveContainer" containerID="5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.389271 4902 scope.go:117] "RemoveContainer" containerID="948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.440681 4902 scope.go:117] "RemoveContainer" containerID="2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.441131 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980\": container with ID starting with 2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980 not found: ID does not exist" containerID="2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.441183 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980"} err="failed to get container status \"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980\": rpc error: code = NotFound desc = could not find container \"2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980\": container with ID starting with 2f0f9294c7af587721c78bc60d840bde3b236c08273e40522c3224a438afb980 not found: ID does not exist" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.441209 4902 scope.go:117] "RemoveContainer" containerID="5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.441493 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157\": container with ID starting with 5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157 not found: ID does not exist" containerID="5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.441517 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157"} err="failed to get container status \"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157\": rpc error: code = NotFound desc = could not find container \"5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157\": container with ID starting with 5efaf2b33a31f007f55e3979e72df7e2e23ebb81a947a48c4ce9416479cf2157 not found: ID does not exist" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.441533 4902 scope.go:117] "RemoveContainer" containerID="948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1" Dec 02 15:25:51 crc kubenswrapper[4902]: E1202 15:25:51.441766 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1\": container with ID starting with 948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1 not found: ID does not exist" containerID="948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.441799 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1"} err="failed to get container status \"948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1\": rpc error: code = NotFound desc = could not find container \"948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1\": container with ID starting with 948c86173574812673b72454e5d9596e16f52eecb0e06ee87d46e6f59eabdfc1 not found: ID does not exist" Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.551878 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"] Dec 02 15:25:51 crc kubenswrapper[4902]: I1202 15:25:51.566843 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p8dbc"] Dec 02 15:25:53 crc kubenswrapper[4902]: I1202 15:25:53.117283 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" path="/var/lib/kubelet/pods/9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9/volumes" Dec 02 15:25:53 crc kubenswrapper[4902]: I1202 15:25:53.117934 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" path="/var/lib/kubelet/pods/cb24e9b8-9890-479a-9248-e978a02e6823/volumes" Dec 02 15:28:04 crc kubenswrapper[4902]: I1202 15:28:04.732023 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:28:04 crc kubenswrapper[4902]: I1202 15:28:04.732722 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:28:34 crc kubenswrapper[4902]: I1202 15:28:34.733051 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:28:34 crc kubenswrapper[4902]: I1202 15:28:34.733643 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.811556 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812825 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" 
containerName="extract-content" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812843 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="extract-content" Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812863 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="extract-content" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812870 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="extract-content" Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812882 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812891 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812904 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812911 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812937 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="extract-utilities" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812945 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="extract-utilities" Dec 02 15:28:55 crc kubenswrapper[4902]: E1202 15:28:55.812956 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="extract-utilities" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.812963 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="extract-utilities" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.813177 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb24e9b8-9890-479a-9248-e978a02e6823" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.813214 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d58e699-c09f-4bb3-8d0f-ea441c8d2fb9" containerName="registry-server" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.816316 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.835406 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.963367 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6lgh\" (UniqueName: \"kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.963493 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:55 crc kubenswrapper[4902]: I1202 15:28:55.963547 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.064821 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6lgh\" (UniqueName: \"kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.064940 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.064995 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.065457 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.066149 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.083594 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v6lgh\" (UniqueName: \"kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh\") pod \"community-operators-zdmbl\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.184624 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:28:56 crc kubenswrapper[4902]: I1202 15:28:56.778898 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:28:56 crc kubenswrapper[4902]: W1202 15:28:56.791770 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda630723_f253_43f0_8ce4_17a28a067226.slice/crio-529571465ffaeb6fce902a16753e9c93a1d0044528cf7dd898e7badf05e1dd73 WatchSource:0}: Error finding container 529571465ffaeb6fce902a16753e9c93a1d0044528cf7dd898e7badf05e1dd73: Status 404 returned error can't find the container with id 529571465ffaeb6fce902a16753e9c93a1d0044528cf7dd898e7badf05e1dd73 Dec 02 15:28:57 crc kubenswrapper[4902]: I1202 15:28:57.256601 4902 generic.go:334] "Generic (PLEG): container finished" podID="da630723-f253-43f0-8ce4-17a28a067226" containerID="2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4" exitCode=0 Dec 02 15:28:57 crc kubenswrapper[4902]: I1202 15:28:57.256642 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerDied","Data":"2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4"} Dec 02 15:28:57 crc kubenswrapper[4902]: I1202 15:28:57.256668 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerStarted","Data":"529571465ffaeb6fce902a16753e9c93a1d0044528cf7dd898e7badf05e1dd73"} Dec 02 15:28:57 crc kubenswrapper[4902]: I1202 15:28:57.258662 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:28:59 crc kubenswrapper[4902]: I1202 15:28:59.279010 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerStarted","Data":"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea"} Dec 02 15:29:00 crc kubenswrapper[4902]: I1202 15:29:00.290739 4902 generic.go:334] "Generic (PLEG): container finished" podID="da630723-f253-43f0-8ce4-17a28a067226" containerID="611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea" exitCode=0 Dec 02 15:29:00 crc kubenswrapper[4902]: I1202 15:29:00.290794 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerDied","Data":"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea"} Dec 02 15:29:02 crc kubenswrapper[4902]: I1202 15:29:02.314535 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerStarted","Data":"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5"} Dec 02 15:29:02 crc kubenswrapper[4902]: I1202 
15:29:02.351244 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zdmbl" podStartSLOduration=3.8136687460000003 podStartE2EDuration="7.35122522s" podCreationTimestamp="2025-12-02 15:28:55 +0000 UTC" firstStartedPulling="2025-12-02 15:28:57.258440965 +0000 UTC m=+4368.449749674" lastFinishedPulling="2025-12-02 15:29:00.795997439 +0000 UTC m=+4371.987306148" observedRunningTime="2025-12-02 15:29:02.341283538 +0000 UTC m=+4373.532592257" watchObservedRunningTime="2025-12-02 15:29:02.35122522 +0000 UTC m=+4373.542533939" Dec 02 15:29:04 crc kubenswrapper[4902]: I1202 15:29:04.731277 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:29:04 crc kubenswrapper[4902]: I1202 15:29:04.732210 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:29:04 crc kubenswrapper[4902]: I1202 15:29:04.732276 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:29:04 crc kubenswrapper[4902]: I1202 15:29:04.733388 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:29:04 crc kubenswrapper[4902]: I1202 15:29:04.733451 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653" gracePeriod=600 Dec 02 15:29:05 crc kubenswrapper[4902]: I1202 15:29:05.346295 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653" exitCode=0 Dec 02 15:29:05 crc kubenswrapper[4902]: I1202 15:29:05.346379 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653"} Dec 02 15:29:05 crc kubenswrapper[4902]: I1202 15:29:05.346680 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc"} Dec 02 15:29:05 crc kubenswrapper[4902]: I1202 15:29:05.346707 4902 scope.go:117] "RemoveContainer" containerID="b5e46259481854592783d9947a34876c00b94410c3b5488ea0e64305a74bee6c" Dec 02 15:29:06 crc kubenswrapper[4902]: I1202 15:29:06.185268 4902 
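The liveness failures above are plain HTTP GETs against 127.0.0.1:8798/health being refused; once the failure threshold is crossed the kubelet logs "failed liveness probe, will be restarted" and kills the container with the pod's 600s grace period. A minimal sketch of such an HTTP liveness check; the 2xx/3xx success window matches kubelet's documented HTTP-probe behavior, but the helper itself is illustrative:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// live performs one HTTP liveness check: a transport error (such as the
// "connect: connection refused" in the log) or a status outside 200-399
// counts as a probe failure.
func live(url string, timeout time.Duration) error {
	client := http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	fmt.Println(live("http://127.0.0.1:8798/health", time.Second))
}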
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:06 crc kubenswrapper[4902]: I1202 15:29:06.185937 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:06 crc kubenswrapper[4902]: I1202 15:29:06.235234 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:06 crc kubenswrapper[4902]: I1202 15:29:06.406806 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:06 crc kubenswrapper[4902]: I1202 15:29:06.590309 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:29:08 crc kubenswrapper[4902]: I1202 15:29:08.380166 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zdmbl" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="registry-server" containerID="cri-o://80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5" gracePeriod=2 Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.001721 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.028331 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6lgh\" (UniqueName: \"kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh\") pod \"da630723-f253-43f0-8ce4-17a28a067226\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.028485 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities\") pod \"da630723-f253-43f0-8ce4-17a28a067226\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.028528 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content\") pod \"da630723-f253-43f0-8ce4-17a28a067226\" (UID: \"da630723-f253-43f0-8ce4-17a28a067226\") " Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.030616 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities" (OuterVolumeSpecName: "utilities") pod "da630723-f253-43f0-8ce4-17a28a067226" (UID: "da630723-f253-43f0-8ce4-17a28a067226"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.098197 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da630723-f253-43f0-8ce4-17a28a067226" (UID: "da630723-f253-43f0-8ce4-17a28a067226"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.134198 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.134277 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da630723-f253-43f0-8ce4-17a28a067226-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.394421 4902 generic.go:334] "Generic (PLEG): container finished" podID="da630723-f253-43f0-8ce4-17a28a067226" containerID="80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5" exitCode=0 Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.394477 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerDied","Data":"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5"} Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.394509 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zdmbl" event={"ID":"da630723-f253-43f0-8ce4-17a28a067226","Type":"ContainerDied","Data":"529571465ffaeb6fce902a16753e9c93a1d0044528cf7dd898e7badf05e1dd73"} Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.394507 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zdmbl" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.394531 4902 scope.go:117] "RemoveContainer" containerID="80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.436634 4902 scope.go:117] "RemoveContainer" containerID="611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.615915 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh" (OuterVolumeSpecName: "kube-api-access-v6lgh") pod "da630723-f253-43f0-8ce4-17a28a067226" (UID: "da630723-f253-43f0-8ce4-17a28a067226"). InnerVolumeSpecName "kube-api-access-v6lgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.626393 4902 scope.go:117] "RemoveContainer" containerID="2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.642410 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6lgh\" (UniqueName: \"kubernetes.io/projected/da630723-f253-43f0-8ce4-17a28a067226-kube-api-access-v6lgh\") on node \"crc\" DevicePath \"\"" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.749267 4902 scope.go:117] "RemoveContainer" containerID="80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5" Dec 02 15:29:09 crc kubenswrapper[4902]: E1202 15:29:09.749902 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5\": container with ID starting with 80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5 not found: ID does not exist" containerID="80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.749930 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5"} err="failed to get container status \"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5\": rpc error: code = NotFound desc = could not find container \"80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5\": container with ID starting with 80f2b04745c5e7057f0ab99ef5690b388f9f7cdace35b6141b63ad086a5dabe5 not found: ID does not exist" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.749951 4902 scope.go:117] "RemoveContainer" containerID="611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea" Dec 02 15:29:09 crc kubenswrapper[4902]: E1202 15:29:09.750268 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea\": container with ID starting with 611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea not found: ID does not exist" containerID="611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.750362 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea"} err="failed to get container status \"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea\": rpc error: code = NotFound desc = could not find container \"611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea\": container with ID starting with 611e3715ca704e833f7744a9e42e7c2eab392223ee14cb83fcefc61a550f34ea not found: ID does not exist" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.750420 4902 scope.go:117] "RemoveContainer" containerID="2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4" Dec 02 15:29:09 crc kubenswrapper[4902]: E1202 15:29:09.750817 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4\": container with ID starting with 2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4 not found: ID does not 
exist" containerID="2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.750844 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4"} err="failed to get container status \"2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4\": rpc error: code = NotFound desc = could not find container \"2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4\": container with ID starting with 2ffce6ba392cd3c54a639b9b11fb571a03f28fc8d736b230ce70caf73f5e41d4 not found: ID does not exist" Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.808293 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:29:09 crc kubenswrapper[4902]: I1202 15:29:09.821682 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zdmbl"] Dec 02 15:29:11 crc kubenswrapper[4902]: I1202 15:29:11.118251 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da630723-f253-43f0-8ce4-17a28a067226" path="/var/lib/kubelet/pods/da630723-f253-43f0-8ce4-17a28a067226/volumes" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.203964 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6"] Dec 02 15:30:00 crc kubenswrapper[4902]: E1202 15:30:00.205934 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="registry-server" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.206102 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="registry-server" Dec 02 15:30:00 crc kubenswrapper[4902]: E1202 15:30:00.206185 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="extract-content" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.206236 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="extract-content" Dec 02 15:30:00 crc kubenswrapper[4902]: E1202 15:30:00.206295 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="extract-utilities" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.206344 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="extract-utilities" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.206579 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="da630723-f253-43f0-8ce4-17a28a067226" containerName="registry-server" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.207345 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.210299 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.210344 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.215508 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6"] Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.340844 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdt4j\" (UniqueName: \"kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.340961 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.341079 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.448059 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.448229 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdt4j\" (UniqueName: \"kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.448341 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.452995 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume\") pod 
\"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.455651 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.473181 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdt4j\" (UniqueName: \"kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j\") pod \"collect-profiles-29411490-x29q6\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:00 crc kubenswrapper[4902]: I1202 15:30:00.541203 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:01 crc kubenswrapper[4902]: I1202 15:30:01.010014 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6"] Dec 02 15:30:02 crc kubenswrapper[4902]: I1202 15:30:02.007209 4902 generic.go:334] "Generic (PLEG): container finished" podID="c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" containerID="40562ca625fff1cb4cb6c07bd16f7804794e2183115b75110906e261f737c495" exitCode=0 Dec 02 15:30:02 crc kubenswrapper[4902]: I1202 15:30:02.007886 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" event={"ID":"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38","Type":"ContainerDied","Data":"40562ca625fff1cb4cb6c07bd16f7804794e2183115b75110906e261f737c495"} Dec 02 15:30:02 crc kubenswrapper[4902]: I1202 15:30:02.007972 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" event={"ID":"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38","Type":"ContainerStarted","Data":"13bb41a22779da571af862325af5d2fdad25f64d827b875c5365f9a7f34ae579"} Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.531996 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.626714 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume\") pod \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.626769 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume\") pod \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.626855 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdt4j\" (UniqueName: \"kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j\") pod \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\" (UID: \"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38\") " Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.627503 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume" (OuterVolumeSpecName: "config-volume") pod "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" (UID: "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.634958 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" (UID: "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.635418 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j" (OuterVolumeSpecName: "kube-api-access-tdt4j") pod "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" (UID: "c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38"). InnerVolumeSpecName "kube-api-access-tdt4j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.729271 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.729309 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:30:03 crc kubenswrapper[4902]: I1202 15:30:03.729321 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdt4j\" (UniqueName: \"kubernetes.io/projected/c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38-kube-api-access-tdt4j\") on node \"crc\" DevicePath \"\"" Dec 02 15:30:04 crc kubenswrapper[4902]: I1202 15:30:04.033513 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" event={"ID":"c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38","Type":"ContainerDied","Data":"13bb41a22779da571af862325af5d2fdad25f64d827b875c5365f9a7f34ae579"} Dec 02 15:30:04 crc kubenswrapper[4902]: I1202 15:30:04.033554 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13bb41a22779da571af862325af5d2fdad25f64d827b875c5365f9a7f34ae579" Dec 02 15:30:04 crc kubenswrapper[4902]: I1202 15:30:04.033689 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411490-x29q6" Dec 02 15:30:04 crc kubenswrapper[4902]: I1202 15:30:04.642661 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm"] Dec 02 15:30:04 crc kubenswrapper[4902]: I1202 15:30:04.652754 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411445-449wm"] Dec 02 15:30:05 crc kubenswrapper[4902]: I1202 15:30:05.128178 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfef4315-d5ee-4ab6-8aef-deda4e073aec" path="/var/lib/kubelet/pods/dfef4315-d5ee-4ab6-8aef-deda4e073aec/volumes" Dec 02 15:30:33 crc kubenswrapper[4902]: I1202 15:30:33.874355 4902 scope.go:117] "RemoveContainer" containerID="0457824933db1ed4853254df0ac242ed42b30057c21b151e5f86e80c4b049245" Dec 02 15:31:34 crc kubenswrapper[4902]: I1202 15:31:34.731945 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:31:34 crc kubenswrapper[4902]: I1202 15:31:34.732723 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:32:04 crc kubenswrapper[4902]: I1202 15:32:04.731980 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 02 15:32:04 crc kubenswrapper[4902]: I1202 15:32:04.732552 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:32:34 crc kubenswrapper[4902]: I1202 15:32:34.731727 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:32:34 crc kubenswrapper[4902]: I1202 15:32:34.732503 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:32:34 crc kubenswrapper[4902]: I1202 15:32:34.732556 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:32:34 crc kubenswrapper[4902]: I1202 15:32:34.733354 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:32:34 crc kubenswrapper[4902]: I1202 15:32:34.733412 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" gracePeriod=600 Dec 02 15:32:34 crc kubenswrapper[4902]: E1202 15:32:34.863473 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:32:35 crc kubenswrapper[4902]: I1202 15:32:35.817643 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" exitCode=0 Dec 02 15:32:35 crc kubenswrapper[4902]: I1202 15:32:35.817721 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc"} Dec 02 15:32:35 crc kubenswrapper[4902]: I1202 15:32:35.818196 4902 scope.go:117] "RemoveContainer" containerID="c9801371f32b7f41a4ce701a046abc392511a791f12891e944cdfc2424073653" Dec 02 15:32:35 crc kubenswrapper[4902]: I1202 15:32:35.819397 4902 scope.go:117] "RemoveContainer" 
containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:32:35 crc kubenswrapper[4902]: E1202 15:32:35.819993 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:32:51 crc kubenswrapper[4902]: I1202 15:32:51.107087 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:32:51 crc kubenswrapper[4902]: E1202 15:32:51.108328 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:33:03 crc kubenswrapper[4902]: I1202 15:33:03.106625 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:33:03 crc kubenswrapper[4902]: E1202 15:33:03.109217 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:33:14 crc kubenswrapper[4902]: I1202 15:33:14.107093 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:33:14 crc kubenswrapper[4902]: E1202 15:33:14.107900 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.739826 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:18 crc kubenswrapper[4902]: E1202 15:33:18.741025 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" containerName="collect-profiles" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.741045 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" containerName="collect-profiles" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.741347 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6d191dd-f5ee-499f-b5e7-0ca9a0eb6d38" containerName="collect-profiles" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.743747 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.761257 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.922398 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.922900 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:18 crc kubenswrapper[4902]: I1202 15:33:18.922934 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4llr\" (UniqueName: \"kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.025029 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.025123 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.025164 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4llr\" (UniqueName: \"kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.025545 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.025870 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.412281 4902 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s4llr\" (UniqueName: \"kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr\") pod \"redhat-marketplace-tq4ms\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:19 crc kubenswrapper[4902]: I1202 15:33:19.667449 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:20 crc kubenswrapper[4902]: I1202 15:33:20.192436 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:20 crc kubenswrapper[4902]: I1202 15:33:20.301197 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerStarted","Data":"e527bcfd044e76be97803ba8ac1880b431051d271f4379c6b13ec35715b6f9c2"} Dec 02 15:33:21 crc kubenswrapper[4902]: I1202 15:33:21.314301 4902 generic.go:334] "Generic (PLEG): container finished" podID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerID="8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0" exitCode=0 Dec 02 15:33:21 crc kubenswrapper[4902]: I1202 15:33:21.314378 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerDied","Data":"8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0"} Dec 02 15:33:23 crc kubenswrapper[4902]: I1202 15:33:23.340779 4902 generic.go:334] "Generic (PLEG): container finished" podID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerID="752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72" exitCode=0 Dec 02 15:33:23 crc kubenswrapper[4902]: I1202 15:33:23.340859 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerDied","Data":"752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72"} Dec 02 15:33:24 crc kubenswrapper[4902]: I1202 15:33:24.352239 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerStarted","Data":"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119"} Dec 02 15:33:24 crc kubenswrapper[4902]: I1202 15:33:24.384870 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tq4ms" podStartSLOduration=3.7538464940000003 podStartE2EDuration="6.384844893s" podCreationTimestamp="2025-12-02 15:33:18 +0000 UTC" firstStartedPulling="2025-12-02 15:33:21.316732754 +0000 UTC m=+4632.508041463" lastFinishedPulling="2025-12-02 15:33:23.947731153 +0000 UTC m=+4635.139039862" observedRunningTime="2025-12-02 15:33:24.375982731 +0000 UTC m=+4635.567291440" watchObservedRunningTime="2025-12-02 15:33:24.384844893 +0000 UTC m=+4635.576153602" Dec 02 15:33:29 crc kubenswrapper[4902]: I1202 15:33:29.120987 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:33:29 crc kubenswrapper[4902]: E1202 15:33:29.122389 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:33:29 crc kubenswrapper[4902]: I1202 15:33:29.667708 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:29 crc kubenswrapper[4902]: I1202 15:33:29.667789 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:29 crc kubenswrapper[4902]: I1202 15:33:29.764669 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:30 crc kubenswrapper[4902]: I1202 15:33:30.544320 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:30 crc kubenswrapper[4902]: I1202 15:33:30.608691 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:32 crc kubenswrapper[4902]: I1202 15:33:32.636841 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tq4ms" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="registry-server" containerID="cri-o://755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119" gracePeriod=2 Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.251602 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.438540 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities\") pod \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.438650 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content\") pod \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.438708 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4llr\" (UniqueName: \"kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr\") pod \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\" (UID: \"3ba1054e-aa49-4516-9bbe-9caea57c5d91\") " Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.439707 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities" (OuterVolumeSpecName: "utilities") pod "3ba1054e-aa49-4516-9bbe-9caea57c5d91" (UID: "3ba1054e-aa49-4516-9bbe-9caea57c5d91"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.457830 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ba1054e-aa49-4516-9bbe-9caea57c5d91" (UID: "3ba1054e-aa49-4516-9bbe-9caea57c5d91"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.472979 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr" (OuterVolumeSpecName: "kube-api-access-s4llr") pod "3ba1054e-aa49-4516-9bbe-9caea57c5d91" (UID: "3ba1054e-aa49-4516-9bbe-9caea57c5d91"). InnerVolumeSpecName "kube-api-access-s4llr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.541672 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.541709 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba1054e-aa49-4516-9bbe-9caea57c5d91-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.541720 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4llr\" (UniqueName: \"kubernetes.io/projected/3ba1054e-aa49-4516-9bbe-9caea57c5d91-kube-api-access-s4llr\") on node \"crc\" DevicePath \"\"" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.649060 4902 generic.go:334] "Generic (PLEG): container finished" podID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerID="755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119" exitCode=0 Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.649102 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerDied","Data":"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119"} Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.649131 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tq4ms" event={"ID":"3ba1054e-aa49-4516-9bbe-9caea57c5d91","Type":"ContainerDied","Data":"e527bcfd044e76be97803ba8ac1880b431051d271f4379c6b13ec35715b6f9c2"} Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.649135 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tq4ms" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.649150 4902 scope.go:117] "RemoveContainer" containerID="755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.689651 4902 scope.go:117] "RemoveContainer" containerID="752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.693593 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.702243 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tq4ms"] Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.709255 4902 scope.go:117] "RemoveContainer" containerID="8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.756907 4902 scope.go:117] "RemoveContainer" containerID="755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119" Dec 02 15:33:33 crc kubenswrapper[4902]: E1202 15:33:33.757308 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119\": container with ID starting with 755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119 not found: ID does not exist" containerID="755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.757360 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119"} err="failed to get container status \"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119\": rpc error: code = NotFound desc = could not find container \"755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119\": container with ID starting with 755bad137faad7d6217956b41ace870ff6bb9ed44b60aef4764d6a1bffa26119 not found: ID does not exist" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.757392 4902 scope.go:117] "RemoveContainer" containerID="752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72" Dec 02 15:33:33 crc kubenswrapper[4902]: E1202 15:33:33.757748 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72\": container with ID starting with 752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72 not found: ID does not exist" containerID="752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.757779 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72"} err="failed to get container status \"752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72\": rpc error: code = NotFound desc = could not find container \"752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72\": container with ID starting with 752c3d2a82d5d941a7f23a9d8e1d1c345798bd189763616e755a046d27068f72 not found: ID does not exist" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.757819 4902 scope.go:117] "RemoveContainer" 
containerID="8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0" Dec 02 15:33:33 crc kubenswrapper[4902]: E1202 15:33:33.758033 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0\": container with ID starting with 8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0 not found: ID does not exist" containerID="8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0" Dec 02 15:33:33 crc kubenswrapper[4902]: I1202 15:33:33.758077 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0"} err="failed to get container status \"8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0\": rpc error: code = NotFound desc = could not find container \"8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0\": container with ID starting with 8b71586d96031c1ecfb9f2792234dd97afb6a5a0147908249350070ee34919d0 not found: ID does not exist" Dec 02 15:33:33 crc kubenswrapper[4902]: E1202 15:33:33.808770 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ba1054e_aa49_4516_9bbe_9caea57c5d91.slice/crio-e527bcfd044e76be97803ba8ac1880b431051d271f4379c6b13ec35715b6f9c2\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ba1054e_aa49_4516_9bbe_9caea57c5d91.slice\": RecentStats: unable to find data in memory cache]" Dec 02 15:33:35 crc kubenswrapper[4902]: I1202 15:33:35.122327 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" path="/var/lib/kubelet/pods/3ba1054e-aa49-4516-9bbe-9caea57c5d91/volumes" Dec 02 15:33:41 crc kubenswrapper[4902]: I1202 15:33:41.106290 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:33:41 crc kubenswrapper[4902]: E1202 15:33:41.107043 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:33:55 crc kubenswrapper[4902]: I1202 15:33:55.107603 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:33:55 crc kubenswrapper[4902]: E1202 15:33:55.108604 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:34:10 crc kubenswrapper[4902]: I1202 15:34:10.106467 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:34:10 crc kubenswrapper[4902]: E1202 15:34:10.107451 4902 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:34:25 crc kubenswrapper[4902]: I1202 15:34:25.107194 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:34:25 crc kubenswrapper[4902]: E1202 15:34:25.108029 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:34:36 crc kubenswrapper[4902]: I1202 15:34:36.107248 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:34:36 crc kubenswrapper[4902]: E1202 15:34:36.108153 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:34:49 crc kubenswrapper[4902]: I1202 15:34:49.120607 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:34:49 crc kubenswrapper[4902]: E1202 15:34:49.121332 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:00 crc kubenswrapper[4902]: I1202 15:35:00.106932 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:00 crc kubenswrapper[4902]: E1202 15:35:00.108143 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:11 crc kubenswrapper[4902]: I1202 15:35:11.107993 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:11 crc kubenswrapper[4902]: E1202 15:35:11.109357 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:23 crc kubenswrapper[4902]: I1202 15:35:23.106690 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:23 crc kubenswrapper[4902]: E1202 15:35:23.107592 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.562650 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:24 crc kubenswrapper[4902]: E1202 15:35:24.563405 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="extract-utilities" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.563423 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="extract-utilities" Dec 02 15:35:24 crc kubenswrapper[4902]: E1202 15:35:24.563458 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="registry-server" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.563466 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="registry-server" Dec 02 15:35:24 crc kubenswrapper[4902]: E1202 15:35:24.563491 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="extract-content" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.563499 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="extract-content" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.564239 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ba1054e-aa49-4516-9bbe-9caea57c5d91" containerName="registry-server" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.565696 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.575624 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.612703 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.612811 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.613032 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stnz6\" (UniqueName: \"kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.714849 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.715253 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.715383 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.715720 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:24 crc kubenswrapper[4902]: I1202 15:35:24.716508 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stnz6\" (UniqueName: \"kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:25 crc kubenswrapper[4902]: I1202 15:35:25.213409 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-stnz6\" (UniqueName: \"kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6\") pod \"redhat-operators-vxkhh\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:25 crc kubenswrapper[4902]: I1202 15:35:25.491143 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:26 crc kubenswrapper[4902]: I1202 15:35:26.012100 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:26 crc kubenswrapper[4902]: I1202 15:35:26.907477 4902 generic.go:334] "Generic (PLEG): container finished" podID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerID="05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c" exitCode=0 Dec 02 15:35:26 crc kubenswrapper[4902]: I1202 15:35:26.907773 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerDied","Data":"05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c"} Dec 02 15:35:26 crc kubenswrapper[4902]: I1202 15:35:26.907802 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerStarted","Data":"da9c3e408ef10f37e96841da9ca5ebd644795c50a7394c56e8930933d9c131a4"} Dec 02 15:35:26 crc kubenswrapper[4902]: I1202 15:35:26.910051 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:35:28 crc kubenswrapper[4902]: I1202 15:35:28.931882 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerStarted","Data":"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154"} Dec 02 15:35:30 crc kubenswrapper[4902]: I1202 15:35:30.951415 4902 generic.go:334] "Generic (PLEG): container finished" podID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerID="b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154" exitCode=0 Dec 02 15:35:30 crc kubenswrapper[4902]: I1202 15:35:30.951494 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerDied","Data":"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154"} Dec 02 15:35:31 crc kubenswrapper[4902]: I1202 15:35:31.962193 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerStarted","Data":"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3"} Dec 02 15:35:34 crc kubenswrapper[4902]: I1202 15:35:34.107470 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:34 crc kubenswrapper[4902]: E1202 15:35:34.108130 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" 
podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:35 crc kubenswrapper[4902]: I1202 15:35:35.491287 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:35 crc kubenswrapper[4902]: I1202 15:35:35.491608 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:36 crc kubenswrapper[4902]: I1202 15:35:36.566781 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vxkhh" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="registry-server" probeResult="failure" output=< Dec 02 15:35:36 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 15:35:36 crc kubenswrapper[4902]: > Dec 02 15:35:45 crc kubenswrapper[4902]: I1202 15:35:45.556746 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:45 crc kubenswrapper[4902]: I1202 15:35:45.606950 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vxkhh" podStartSLOduration=17.052021728 podStartE2EDuration="21.606910083s" podCreationTimestamp="2025-12-02 15:35:24 +0000 UTC" firstStartedPulling="2025-12-02 15:35:26.909736843 +0000 UTC m=+4758.101045552" lastFinishedPulling="2025-12-02 15:35:31.464625168 +0000 UTC m=+4762.655933907" observedRunningTime="2025-12-02 15:35:31.987334987 +0000 UTC m=+4763.178643696" watchObservedRunningTime="2025-12-02 15:35:45.606910083 +0000 UTC m=+4776.798218852" Dec 02 15:35:45 crc kubenswrapper[4902]: I1202 15:35:45.632010 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:45 crc kubenswrapper[4902]: I1202 15:35:45.813547 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:46 crc kubenswrapper[4902]: I1202 15:35:46.107200 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:46 crc kubenswrapper[4902]: E1202 15:35:46.107487 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.150018 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vxkhh" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="registry-server" containerID="cri-o://1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3" gracePeriod=2 Dec 02 15:35:47 crc kubenswrapper[4902]: E1202 15:35:47.454614 4902 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01d25719_c8a2_430d_b412_1c4b1fa71313.slice/crio-conmon-1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01d25719_c8a2_430d_b412_1c4b1fa71313.slice/crio-1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3.scope\": RecentStats: unable to find data in memory cache]" Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.765116 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.899226 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities\") pod \"01d25719-c8a2-430d-b412-1c4b1fa71313\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.899267 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stnz6\" (UniqueName: \"kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6\") pod \"01d25719-c8a2-430d-b412-1c4b1fa71313\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.899368 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content\") pod \"01d25719-c8a2-430d-b412-1c4b1fa71313\" (UID: \"01d25719-c8a2-430d-b412-1c4b1fa71313\") " Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.901153 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities" (OuterVolumeSpecName: "utilities") pod "01d25719-c8a2-430d-b412-1c4b1fa71313" (UID: "01d25719-c8a2-430d-b412-1c4b1fa71313"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:35:47 crc kubenswrapper[4902]: I1202 15:35:47.906732 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6" (OuterVolumeSpecName: "kube-api-access-stnz6") pod "01d25719-c8a2-430d-b412-1c4b1fa71313" (UID: "01d25719-c8a2-430d-b412-1c4b1fa71313"). InnerVolumeSpecName "kube-api-access-stnz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.001645 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stnz6\" (UniqueName: \"kubernetes.io/projected/01d25719-c8a2-430d-b412-1c4b1fa71313-kube-api-access-stnz6\") on node \"crc\" DevicePath \"\"" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.001680 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.021545 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01d25719-c8a2-430d-b412-1c4b1fa71313" (UID: "01d25719-c8a2-430d-b412-1c4b1fa71313"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.103593 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01d25719-c8a2-430d-b412-1c4b1fa71313-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.166177 4902 generic.go:334] "Generic (PLEG): container finished" podID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerID="1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3" exitCode=0 Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.166250 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerDied","Data":"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3"} Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.166315 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vxkhh" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.166349 4902 scope.go:117] "RemoveContainer" containerID="1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.166327 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vxkhh" event={"ID":"01d25719-c8a2-430d-b412-1c4b1fa71313","Type":"ContainerDied","Data":"da9c3e408ef10f37e96841da9ca5ebd644795c50a7394c56e8930933d9c131a4"} Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.205102 4902 scope.go:117] "RemoveContainer" containerID="b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.228972 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.234556 4902 scope.go:117] "RemoveContainer" containerID="05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.241351 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vxkhh"] Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.276780 4902 scope.go:117] "RemoveContainer" containerID="1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3" Dec 02 15:35:48 crc kubenswrapper[4902]: E1202 15:35:48.277150 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3\": container with ID starting with 1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3 not found: ID does not exist" containerID="1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.277205 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3"} err="failed to get container status \"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3\": rpc error: code = NotFound desc = could not find container \"1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3\": container with ID starting with 1bc88e72e9842d6a2b9a4a950e35fef7297e878c6c65447dee7a8d003c08b5f3 not found: ID does not exist" Dec 02 15:35:48 crc 
kubenswrapper[4902]: I1202 15:35:48.277234 4902 scope.go:117] "RemoveContainer" containerID="b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154" Dec 02 15:35:48 crc kubenswrapper[4902]: E1202 15:35:48.277519 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154\": container with ID starting with b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154 not found: ID does not exist" containerID="b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.277555 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154"} err="failed to get container status \"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154\": rpc error: code = NotFound desc = could not find container \"b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154\": container with ID starting with b7d2c728a470d8cac240808c92b764b4bdb51bed804024e81aa9e5ea2da60154 not found: ID does not exist" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.277600 4902 scope.go:117] "RemoveContainer" containerID="05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c" Dec 02 15:35:48 crc kubenswrapper[4902]: E1202 15:35:48.277836 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c\": container with ID starting with 05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c not found: ID does not exist" containerID="05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c" Dec 02 15:35:48 crc kubenswrapper[4902]: I1202 15:35:48.277860 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c"} err="failed to get container status \"05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c\": rpc error: code = NotFound desc = could not find container \"05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c\": container with ID starting with 05a0733a137a61486e04057342407b5aaea53370af9d7b684fa5208e939a882c not found: ID does not exist" Dec 02 15:35:49 crc kubenswrapper[4902]: I1202 15:35:49.122923 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" path="/var/lib/kubelet/pods/01d25719-c8a2-430d-b412-1c4b1fa71313/volumes" Dec 02 15:35:59 crc kubenswrapper[4902]: I1202 15:35:59.123950 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:35:59 crc kubenswrapper[4902]: E1202 15:35:59.124676 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:36:11 crc kubenswrapper[4902]: I1202 15:36:11.108152 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" 
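The repeated "back-off 5m0s restarting failed container=machine-config-daemon" errors running through this stretch of the log are the kubelet's per-container restart backoff sitting at its ceiling: the retry delay roughly doubles after each failed start and is capped at five minutes, after which every sync of the pod re-logs the same CrashLoopBackOff error until a container start finally succeeds (as it does for machine-config-daemon-v8znh at 15:37:46 below). A minimal Go sketch of that arithmetic, assuming the kubelet defaults of a 10-second base doubling up to a 5-minute cap; this is an illustrative reconstruction, not the kubelet's actual implementation:

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextBackoff doubles the previous delay, starting at base and
    // saturating at limit, mirroring the flowcontrol.Backoff-style
    // policy the kubelet applies to failed container restarts.
    func nextBackoff(prev, base, limit time.Duration) time.Duration {
    	if prev == 0 {
    		return base
    	}
    	if next := prev * 2; next < limit {
    		return next
    	}
    	return limit
    }

    func main() {
    	// Assumed kubelet defaults: 10s initial backoff, 5m ceiling.
    	base, limit := 10*time.Second, 5*time.Minute
    	var d time.Duration
    	for attempt := 1; attempt <= 8; attempt++ {
    		d = nextBackoff(d, base, limit)
    		fmt.Printf("failed start %d -> next retry in %v\n", attempt, d)
    	}
    	// The delay saturates at 5m0s, matching the "back-off 5m0s"
    	// string repeated in the surrounding log entries.
    }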
Dec 02 15:36:11 crc kubenswrapper[4902]: E1202 15:36:11.109119 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:36:23 crc kubenswrapper[4902]: I1202 15:36:23.106891 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:36:23 crc kubenswrapper[4902]: E1202 15:36:23.107659 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:36:37 crc kubenswrapper[4902]: I1202 15:36:37.106810 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:36:37 crc kubenswrapper[4902]: E1202 15:36:37.107926 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:36:51 crc kubenswrapper[4902]: I1202 15:36:51.107134 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:36:51 crc kubenswrapper[4902]: E1202 15:36:51.108199 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:37:05 crc kubenswrapper[4902]: I1202 15:37:05.107753 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:37:05 crc kubenswrapper[4902]: E1202 15:37:05.108960 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:37:20 crc kubenswrapper[4902]: I1202 15:37:20.107350 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:37:20 crc kubenswrapper[4902]: E1202 15:37:20.108413 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:37:32 crc kubenswrapper[4902]: I1202 15:37:32.107593 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:37:32 crc kubenswrapper[4902]: E1202 15:37:32.108693 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:37:46 crc kubenswrapper[4902]: I1202 15:37:46.106738 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc" Dec 02 15:37:46 crc kubenswrapper[4902]: I1202 15:37:46.637113 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348"} Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.764149 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:14 crc kubenswrapper[4902]: E1202 15:38:14.765170 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="registry-server" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.765187 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="registry-server" Dec 02 15:38:14 crc kubenswrapper[4902]: E1202 15:38:14.765229 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="extract-utilities" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.765237 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="extract-utilities" Dec 02 15:38:14 crc kubenswrapper[4902]: E1202 15:38:14.765254 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="extract-content" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.765262 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="extract-content" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.765527 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="01d25719-c8a2-430d-b412-1c4b1fa71313" containerName="registry-server" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.767344 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.797133 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.896695 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.897080 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc8kk\" (UniqueName: \"kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.897275 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.998982 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.999077 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.999210 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc8kk\" (UniqueName: \"kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.999770 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:14 crc kubenswrapper[4902]: I1202 15:38:14.999782 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.025251 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tc8kk\" (UniqueName: \"kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk\") pod \"certified-operators-c6wzb\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.126935 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.659693 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:15 crc kubenswrapper[4902]: W1202 15:38:15.672254 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda490d23b_6d82_4a31_8924_5dae14e9a716.slice/crio-d61c8edde16e7b7bc0b67e7e843afe13edc43d44878cd96b4e6073958d838355 WatchSource:0}: Error finding container d61c8edde16e7b7bc0b67e7e843afe13edc43d44878cd96b4e6073958d838355: Status 404 returned error can't find the container with id d61c8edde16e7b7bc0b67e7e843afe13edc43d44878cd96b4e6073958d838355 Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.959996 4902 generic.go:334] "Generic (PLEG): container finished" podID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerID="6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc" exitCode=0 Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.960038 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerDied","Data":"6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc"} Dec 02 15:38:15 crc kubenswrapper[4902]: I1202 15:38:15.960112 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerStarted","Data":"d61c8edde16e7b7bc0b67e7e843afe13edc43d44878cd96b4e6073958d838355"} Dec 02 15:38:16 crc kubenswrapper[4902]: I1202 15:38:16.972584 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerStarted","Data":"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329"} Dec 02 15:38:17 crc kubenswrapper[4902]: I1202 15:38:17.987775 4902 generic.go:334] "Generic (PLEG): container finished" podID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerID="a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329" exitCode=0 Dec 02 15:38:17 crc kubenswrapper[4902]: I1202 15:38:17.987876 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerDied","Data":"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329"} Dec 02 15:38:19 crc kubenswrapper[4902]: I1202 15:38:19.004951 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerStarted","Data":"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966"} Dec 02 15:38:19 crc kubenswrapper[4902]: I1202 15:38:19.039338 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c6wzb" 
podStartSLOduration=2.555633384 podStartE2EDuration="5.039311412s" podCreationTimestamp="2025-12-02 15:38:14 +0000 UTC" firstStartedPulling="2025-12-02 15:38:15.9647211 +0000 UTC m=+4927.156029809" lastFinishedPulling="2025-12-02 15:38:18.448399128 +0000 UTC m=+4929.639707837" observedRunningTime="2025-12-02 15:38:19.03675792 +0000 UTC m=+4930.228066669" watchObservedRunningTime="2025-12-02 15:38:19.039311412 +0000 UTC m=+4930.230620161" Dec 02 15:38:25 crc kubenswrapper[4902]: I1202 15:38:25.128302 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:25 crc kubenswrapper[4902]: I1202 15:38:25.128617 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:25 crc kubenswrapper[4902]: I1202 15:38:25.512850 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:26 crc kubenswrapper[4902]: I1202 15:38:26.175220 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:26 crc kubenswrapper[4902]: I1202 15:38:26.235637 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.101146 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c6wzb" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="registry-server" containerID="cri-o://5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966" gracePeriod=2 Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.661764 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.830144 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content\") pod \"a490d23b-6d82-4a31-8924-5dae14e9a716\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.830291 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities\") pod \"a490d23b-6d82-4a31-8924-5dae14e9a716\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.830498 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc8kk\" (UniqueName: \"kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk\") pod \"a490d23b-6d82-4a31-8924-5dae14e9a716\" (UID: \"a490d23b-6d82-4a31-8924-5dae14e9a716\") " Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.831914 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities" (OuterVolumeSpecName: "utilities") pod "a490d23b-6d82-4a31-8924-5dae14e9a716" (UID: "a490d23b-6d82-4a31-8924-5dae14e9a716"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.837279 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk" (OuterVolumeSpecName: "kube-api-access-tc8kk") pod "a490d23b-6d82-4a31-8924-5dae14e9a716" (UID: "a490d23b-6d82-4a31-8924-5dae14e9a716"). InnerVolumeSpecName "kube-api-access-tc8kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.878859 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a490d23b-6d82-4a31-8924-5dae14e9a716" (UID: "a490d23b-6d82-4a31-8924-5dae14e9a716"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.933307 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.933360 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a490d23b-6d82-4a31-8924-5dae14e9a716-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:28 crc kubenswrapper[4902]: I1202 15:38:28.933384 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc8kk\" (UniqueName: \"kubernetes.io/projected/a490d23b-6d82-4a31-8924-5dae14e9a716-kube-api-access-tc8kk\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.116553 4902 generic.go:334] "Generic (PLEG): container finished" podID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerID="5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966" exitCode=0 Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.124546 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6wzb" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.126191 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerDied","Data":"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966"} Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.126234 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6wzb" event={"ID":"a490d23b-6d82-4a31-8924-5dae14e9a716","Type":"ContainerDied","Data":"d61c8edde16e7b7bc0b67e7e843afe13edc43d44878cd96b4e6073958d838355"} Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.126273 4902 scope.go:117] "RemoveContainer" containerID="5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.161450 4902 scope.go:117] "RemoveContainer" containerID="a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.193695 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.202320 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c6wzb"] Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.208714 4902 scope.go:117] "RemoveContainer" containerID="6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.252899 4902 scope.go:117] "RemoveContainer" containerID="5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966" Dec 02 15:38:29 crc kubenswrapper[4902]: E1202 15:38:29.253478 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966\": container with ID starting with 5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966 not found: ID does not exist" containerID="5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.253519 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966"} err="failed to get container status \"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966\": rpc error: code = NotFound desc = could not find container \"5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966\": container with ID starting with 5699fe96e8431e9a3cd74b6d7a3d4f201cc95e6bf034c534cd58931375a46966 not found: ID does not exist" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.253546 4902 scope.go:117] "RemoveContainer" containerID="a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329" Dec 02 15:38:29 crc kubenswrapper[4902]: E1202 15:38:29.253920 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329\": container with ID starting with a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329 not found: ID does not exist" containerID="a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.253952 4902 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329"} err="failed to get container status \"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329\": rpc error: code = NotFound desc = could not find container \"a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329\": container with ID starting with a04e13b1ae2c2432236e113e6c55c87c851d75966c5f38f4e2629f570f9b9329 not found: ID does not exist" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.253970 4902 scope.go:117] "RemoveContainer" containerID="6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc" Dec 02 15:38:29 crc kubenswrapper[4902]: E1202 15:38:29.254247 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc\": container with ID starting with 6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc not found: ID does not exist" containerID="6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc" Dec 02 15:38:29 crc kubenswrapper[4902]: I1202 15:38:29.254270 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc"} err="failed to get container status \"6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc\": rpc error: code = NotFound desc = could not find container \"6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc\": container with ID starting with 6cfa341cd86a576b761ebc66103ba2bae2ba77cf77d132ae527dc0cff01b45bc not found: ID does not exist" Dec 02 15:38:31 crc kubenswrapper[4902]: I1202 15:38:31.123487 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" path="/var/lib/kubelet/pods/a490d23b-6d82-4a31-8924-5dae14e9a716/volumes" Dec 02 15:38:40 crc kubenswrapper[4902]: I1202 15:38:40.257657 4902 generic.go:334] "Generic (PLEG): container finished" podID="3469f18f-c530-4a00-91ba-95720c45b4c2" containerID="9db67023a14534dd3abe8c6358e68f675c257f28d1b164fa902d65ab41d5e998" exitCode=0 Dec 02 15:38:40 crc kubenswrapper[4902]: I1202 15:38:40.257718 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3469f18f-c530-4a00-91ba-95720c45b4c2","Type":"ContainerDied","Data":"9db67023a14534dd3abe8c6358e68f675c257f28d1b164fa902d65ab41d5e998"} Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.726734 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.823381 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.823751 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.823956 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8qd2\" (UniqueName: \"kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824065 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824151 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824233 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824443 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824654 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824802 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key\") pod \"3469f18f-c530-4a00-91ba-95720c45b4c2\" (UID: \"3469f18f-c530-4a00-91ba-95720c45b4c2\") " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.824690 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data" (OuterVolumeSpecName: "config-data") pod 
"3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.825504 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.826435 4902 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.826540 4902 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.831059 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.833463 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2" (OuterVolumeSpecName: "kube-api-access-b8qd2") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "kube-api-access-b8qd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.858862 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.864072 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.868461 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.900716 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.925631 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "3469f18f-c530-4a00-91ba-95720c45b4c2" (UID: "3469f18f-c530-4a00-91ba-95720c45b4c2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928452 4902 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928494 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928509 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8qd2\" (UniqueName: \"kubernetes.io/projected/3469f18f-c530-4a00-91ba-95720c45b4c2-kube-api-access-b8qd2\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928519 4902 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928529 4902 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3469f18f-c530-4a00-91ba-95720c45b4c2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928539 4902 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3469f18f-c530-4a00-91ba-95720c45b4c2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.928588 4902 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 02 15:38:41 crc kubenswrapper[4902]: I1202 15:38:41.950911 4902 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 02 15:38:42 crc kubenswrapper[4902]: I1202 15:38:42.029982 4902 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 02 15:38:42 crc kubenswrapper[4902]: I1202 15:38:42.282361 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"3469f18f-c530-4a00-91ba-95720c45b4c2","Type":"ContainerDied","Data":"0d1f4521f335f96cfd0179d56ebfc38a96cf149275497f27093d291b56b82ffc"} Dec 02 15:38:42 crc kubenswrapper[4902]: I1202 15:38:42.282398 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d1f4521f335f96cfd0179d56ebfc38a96cf149275497f27093d291b56b82ffc" Dec 02 15:38:42 crc kubenswrapper[4902]: I1202 15:38:42.282400 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.903795 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 15:38:48 crc kubenswrapper[4902]: E1202 15:38:48.905345 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="extract-utilities" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905366 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="extract-utilities" Dec 02 15:38:48 crc kubenswrapper[4902]: E1202 15:38:48.905385 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3469f18f-c530-4a00-91ba-95720c45b4c2" containerName="tempest-tests-tempest-tests-runner" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905392 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="3469f18f-c530-4a00-91ba-95720c45b4c2" containerName="tempest-tests-tempest-tests-runner" Dec 02 15:38:48 crc kubenswrapper[4902]: E1202 15:38:48.905404 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="extract-content" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905411 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="extract-content" Dec 02 15:38:48 crc kubenswrapper[4902]: E1202 15:38:48.905425 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="registry-server" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905432 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="registry-server" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905690 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="3469f18f-c530-4a00-91ba-95720c45b4c2" containerName="tempest-tests-tempest-tests-runner" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.905727 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="a490d23b-6d82-4a31-8924-5dae14e9a716" containerName="registry-server" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.907231 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.911600 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vc7jf" Dec 02 15:38:48 crc kubenswrapper[4902]: I1202 15:38:48.919017 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.079733 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.080260 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdf7z\" (UniqueName: \"kubernetes.io/projected/ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d-kube-api-access-pdf7z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.182634 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdf7z\" (UniqueName: \"kubernetes.io/projected/ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d-kube-api-access-pdf7z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.182893 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.184209 4902 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.813367 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdf7z\" (UniqueName: \"kubernetes.io/projected/ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d-kube-api-access-pdf7z\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:49 crc kubenswrapper[4902]: I1202 15:38:49.855944 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:50 crc 
kubenswrapper[4902]: I1202 15:38:50.137455 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 15:38:50 crc kubenswrapper[4902]: I1202 15:38:50.688688 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 15:38:51 crc kubenswrapper[4902]: I1202 15:38:51.395633 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d","Type":"ContainerStarted","Data":"6f811f8af030db480c12a54360d5a025cb8befe5b4fea52a0cfe087038b4f2e7"} Dec 02 15:38:53 crc kubenswrapper[4902]: I1202 15:38:53.434293 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d","Type":"ContainerStarted","Data":"d0a4c2161c04672c32f2a0d7f8f2131fe68897587271200088ae5da6f0e11b9f"} Dec 02 15:38:53 crc kubenswrapper[4902]: I1202 15:38:53.459428 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=3.853037282 podStartE2EDuration="5.459404536s" podCreationTimestamp="2025-12-02 15:38:48 +0000 UTC" firstStartedPulling="2025-12-02 15:38:50.694994151 +0000 UTC m=+4961.886302860" lastFinishedPulling="2025-12-02 15:38:52.301361385 +0000 UTC m=+4963.492670114" observedRunningTime="2025-12-02 15:38:53.451832572 +0000 UTC m=+4964.643141321" watchObservedRunningTime="2025-12-02 15:38:53.459404536 +0000 UTC m=+4964.650713275" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.563309 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.568243 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.612287 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj8fp\" (UniqueName: \"kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.612990 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.613126 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.626923 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.713844 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj8fp\" (UniqueName: \"kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.713970 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.714055 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.714705 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.714749 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.735950 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nj8fp\" (UniqueName: \"kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp\") pod \"community-operators-q4567\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:07 crc kubenswrapper[4902]: I1202 15:39:07.908489 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:08 crc kubenswrapper[4902]: I1202 15:39:08.532901 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:08 crc kubenswrapper[4902]: I1202 15:39:08.642395 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerStarted","Data":"3dc6bec91994c613f0573ec34b03c23a51254f7713f4309e8ae719885d9c3f61"} Dec 02 15:39:09 crc kubenswrapper[4902]: I1202 15:39:09.654305 4902 generic.go:334] "Generic (PLEG): container finished" podID="2f834b25-2305-4acc-9998-eca594071796" containerID="a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771" exitCode=0 Dec 02 15:39:09 crc kubenswrapper[4902]: I1202 15:39:09.654619 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerDied","Data":"a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771"} Dec 02 15:39:10 crc kubenswrapper[4902]: I1202 15:39:10.665640 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerStarted","Data":"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc"} Dec 02 15:39:11 crc kubenswrapper[4902]: I1202 15:39:11.677285 4902 generic.go:334] "Generic (PLEG): container finished" podID="2f834b25-2305-4acc-9998-eca594071796" containerID="03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc" exitCode=0 Dec 02 15:39:11 crc kubenswrapper[4902]: I1202 15:39:11.677557 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerDied","Data":"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc"} Dec 02 15:39:12 crc kubenswrapper[4902]: I1202 15:39:12.688447 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerStarted","Data":"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea"} Dec 02 15:39:12 crc kubenswrapper[4902]: I1202 15:39:12.716800 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q4567" podStartSLOduration=3.107068105 podStartE2EDuration="5.716777045s" podCreationTimestamp="2025-12-02 15:39:07 +0000 UTC" firstStartedPulling="2025-12-02 15:39:09.657061155 +0000 UTC m=+4980.848369874" lastFinishedPulling="2025-12-02 15:39:12.266770105 +0000 UTC m=+4983.458078814" observedRunningTime="2025-12-02 15:39:12.707132282 +0000 UTC m=+4983.898441001" watchObservedRunningTime="2025-12-02 15:39:12.716777045 +0000 UTC m=+4983.908085754" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.265695 4902 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-hvhq4/must-gather-z8rvc"] Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.270161 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.276276 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-hvhq4"/"openshift-service-ca.crt" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.276532 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-hvhq4"/"default-dockercfg-m8jwn" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.276696 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-hvhq4"/"kube-root-ca.crt" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.281272 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-hvhq4/must-gather-z8rvc"] Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.376674 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx49s\" (UniqueName: \"kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.376729 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.478403 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx49s\" (UniqueName: \"kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.478454 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.479051 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.513278 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx49s\" (UniqueName: \"kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s\") pod \"must-gather-z8rvc\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:14 crc kubenswrapper[4902]: I1202 15:39:14.593381 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:39:15 crc kubenswrapper[4902]: I1202 15:39:15.085556 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-hvhq4/must-gather-z8rvc"] Dec 02 15:39:15 crc kubenswrapper[4902]: W1202 15:39:15.090149 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3ebb20e_c142_4cd0_842a_344552cc2014.slice/crio-a66febb531bf477e35e1ff77cc6a9294066ff3084093877d34fd561e94b2c01c WatchSource:0}: Error finding container a66febb531bf477e35e1ff77cc6a9294066ff3084093877d34fd561e94b2c01c: Status 404 returned error can't find the container with id a66febb531bf477e35e1ff77cc6a9294066ff3084093877d34fd561e94b2c01c Dec 02 15:39:15 crc kubenswrapper[4902]: I1202 15:39:15.715712 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" event={"ID":"c3ebb20e-c142-4cd0-842a-344552cc2014","Type":"ContainerStarted","Data":"a66febb531bf477e35e1ff77cc6a9294066ff3084093877d34fd561e94b2c01c"} Dec 02 15:39:17 crc kubenswrapper[4902]: I1202 15:39:17.908772 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:17 crc kubenswrapper[4902]: I1202 15:39:17.909343 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:17 crc kubenswrapper[4902]: I1202 15:39:17.956576 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:18 crc kubenswrapper[4902]: I1202 15:39:18.792242 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:19 crc kubenswrapper[4902]: I1202 15:39:19.327258 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:20 crc kubenswrapper[4902]: I1202 15:39:20.765161 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" event={"ID":"c3ebb20e-c142-4cd0-842a-344552cc2014","Type":"ContainerStarted","Data":"c0632413fea6c6015b05cf933b70e64360d5a24e0d695760ac2cc5f0d2117653"} Dec 02 15:39:20 crc kubenswrapper[4902]: I1202 15:39:20.765761 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" event={"ID":"c3ebb20e-c142-4cd0-842a-344552cc2014","Type":"ContainerStarted","Data":"59b642074d8518f1443de6f27a8bc49ec090cc13fdc5003d26e6970fc3d73223"} Dec 02 15:39:20 crc kubenswrapper[4902]: I1202 15:39:20.765342 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q4567" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="registry-server" containerID="cri-o://bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea" gracePeriod=2 Dec 02 15:39:20 crc kubenswrapper[4902]: I1202 15:39:20.798975 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" podStartSLOduration=2.286993173 podStartE2EDuration="6.798951858s" podCreationTimestamp="2025-12-02 15:39:14 +0000 UTC" firstStartedPulling="2025-12-02 15:39:15.093328383 +0000 UTC m=+4986.284637092" lastFinishedPulling="2025-12-02 15:39:19.605287068 +0000 UTC m=+4990.796595777" 
observedRunningTime="2025-12-02 15:39:20.789758367 +0000 UTC m=+4991.981067086" watchObservedRunningTime="2025-12-02 15:39:20.798951858 +0000 UTC m=+4991.990260587" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.232293 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.428790 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content\") pod \"2f834b25-2305-4acc-9998-eca594071796\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.429136 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj8fp\" (UniqueName: \"kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp\") pod \"2f834b25-2305-4acc-9998-eca594071796\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.430045 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities\") pod \"2f834b25-2305-4acc-9998-eca594071796\" (UID: \"2f834b25-2305-4acc-9998-eca594071796\") " Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.431334 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities" (OuterVolumeSpecName: "utilities") pod "2f834b25-2305-4acc-9998-eca594071796" (UID: "2f834b25-2305-4acc-9998-eca594071796"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.438817 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp" (OuterVolumeSpecName: "kube-api-access-nj8fp") pod "2f834b25-2305-4acc-9998-eca594071796" (UID: "2f834b25-2305-4acc-9998-eca594071796"). InnerVolumeSpecName "kube-api-access-nj8fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.491934 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f834b25-2305-4acc-9998-eca594071796" (UID: "2f834b25-2305-4acc-9998-eca594071796"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.531624 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.531960 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj8fp\" (UniqueName: \"kubernetes.io/projected/2f834b25-2305-4acc-9998-eca594071796-kube-api-access-nj8fp\") on node \"crc\" DevicePath \"\"" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.532052 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f834b25-2305-4acc-9998-eca594071796-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.774150 4902 generic.go:334] "Generic (PLEG): container finished" podID="2f834b25-2305-4acc-9998-eca594071796" containerID="bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea" exitCode=0 Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.774308 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4567" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.774326 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerDied","Data":"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea"} Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.774966 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4567" event={"ID":"2f834b25-2305-4acc-9998-eca594071796","Type":"ContainerDied","Data":"3dc6bec91994c613f0573ec34b03c23a51254f7713f4309e8ae719885d9c3f61"} Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.774987 4902 scope.go:117] "RemoveContainer" containerID="bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.800142 4902 scope.go:117] "RemoveContainer" containerID="03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.851900 4902 scope.go:117] "RemoveContainer" containerID="a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.854377 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.864522 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q4567"] Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.891420 4902 scope.go:117] "RemoveContainer" containerID="bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea" Dec 02 15:39:21 crc kubenswrapper[4902]: E1202 15:39:21.891905 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea\": container with ID starting with bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea not found: ID does not exist" containerID="bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.891946 
4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea"} err="failed to get container status \"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea\": rpc error: code = NotFound desc = could not find container \"bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea\": container with ID starting with bfe249839c3f20738cf343612ccce75e1ae65e60e452cd86c9277f07b32fd4ea not found: ID does not exist" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.891967 4902 scope.go:117] "RemoveContainer" containerID="03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc" Dec 02 15:39:21 crc kubenswrapper[4902]: E1202 15:39:21.892268 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc\": container with ID starting with 03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc not found: ID does not exist" containerID="03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.892296 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc"} err="failed to get container status \"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc\": rpc error: code = NotFound desc = could not find container \"03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc\": container with ID starting with 03c954d3ea8586987be3be90bde6b47291c1046f992416b7e738712db6e0aabc not found: ID does not exist" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.892313 4902 scope.go:117] "RemoveContainer" containerID="a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771" Dec 02 15:39:21 crc kubenswrapper[4902]: E1202 15:39:21.892526 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771\": container with ID starting with a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771 not found: ID does not exist" containerID="a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771" Dec 02 15:39:21 crc kubenswrapper[4902]: I1202 15:39:21.892550 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771"} err="failed to get container status \"a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771\": rpc error: code = NotFound desc = could not find container \"a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771\": container with ID starting with a40fcc794a8365bc432ba33ea223a975f641c2bf0660ba5e5ea2711380b8d771 not found: ID does not exist" Dec 02 15:39:23 crc kubenswrapper[4902]: I1202 15:39:23.117797 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f834b25-2305-4acc-9998-eca594071796" path="/var/lib/kubelet/pods/2f834b25-2305-4acc-9998-eca594071796/volumes" Dec 02 15:39:23 crc kubenswrapper[4902]: E1202 15:39:23.128173 4902 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:33442->38.102.83.251:38083: write tcp 38.102.83.251:33442->38.102.83.251:38083: write: broken pipe Dec 02 15:39:24 crc kubenswrapper[4902]: 
I1202 15:39:24.135239 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-k4cr9"] Dec 02 15:39:24 crc kubenswrapper[4902]: E1202 15:39:24.135902 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="registry-server" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.135914 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="registry-server" Dec 02 15:39:24 crc kubenswrapper[4902]: E1202 15:39:24.135933 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="extract-content" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.135939 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="extract-content" Dec 02 15:39:24 crc kubenswrapper[4902]: E1202 15:39:24.135956 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="extract-utilities" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.135963 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="extract-utilities" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.136131 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f834b25-2305-4acc-9998-eca594071796" containerName="registry-server" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.136764 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.182435 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5xmx\" (UniqueName: \"kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx\") pod \"crc-debug-k4cr9\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.182519 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host\") pod \"crc-debug-k4cr9\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.284155 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host\") pod \"crc-debug-k4cr9\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.284297 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host\") pod \"crc-debug-k4cr9\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.285084 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5xmx\" (UniqueName: \"kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx\") pod \"crc-debug-k4cr9\" (UID: 
\"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.310010 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5xmx\" (UniqueName: \"kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx\") pod \"crc-debug-k4cr9\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.454986 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:39:24 crc kubenswrapper[4902]: W1202 15:39:24.490746 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ef80eb6_3980_4115_a3c1_956e37c6b70d.slice/crio-5e2f7555691a3b248b630baf3e4e3f6268b874346d4f755faa245f95a653ce18 WatchSource:0}: Error finding container 5e2f7555691a3b248b630baf3e4e3f6268b874346d4f755faa245f95a653ce18: Status 404 returned error can't find the container with id 5e2f7555691a3b248b630baf3e4e3f6268b874346d4f755faa245f95a653ce18 Dec 02 15:39:24 crc kubenswrapper[4902]: I1202 15:39:24.827108 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" event={"ID":"9ef80eb6-3980-4115-a3c1-956e37c6b70d","Type":"ContainerStarted","Data":"5e2f7555691a3b248b630baf3e4e3f6268b874346d4f755faa245f95a653ce18"} Dec 02 15:39:35 crc kubenswrapper[4902]: I1202 15:39:35.949041 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" event={"ID":"9ef80eb6-3980-4115-a3c1-956e37c6b70d","Type":"ContainerStarted","Data":"f6986802e0ab253d79a663d28c30c202e678ab7723fff8d4e39b8816df59d8af"} Dec 02 15:39:35 crc kubenswrapper[4902]: I1202 15:39:35.970950 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" podStartSLOduration=1.175346358 podStartE2EDuration="11.97093431s" podCreationTimestamp="2025-12-02 15:39:24 +0000 UTC" firstStartedPulling="2025-12-02 15:39:24.492408841 +0000 UTC m=+4995.683717550" lastFinishedPulling="2025-12-02 15:39:35.287996773 +0000 UTC m=+5006.479305502" observedRunningTime="2025-12-02 15:39:35.963957213 +0000 UTC m=+5007.155265922" watchObservedRunningTime="2025-12-02 15:39:35.97093431 +0000 UTC m=+5007.162243019" Dec 02 15:40:04 crc kubenswrapper[4902]: I1202 15:40:04.732177 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:40:04 crc kubenswrapper[4902]: I1202 15:40:04.732738 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:40:21 crc kubenswrapper[4902]: I1202 15:40:21.372583 4902 generic.go:334] "Generic (PLEG): container finished" podID="9ef80eb6-3980-4115-a3c1-956e37c6b70d" containerID="f6986802e0ab253d79a663d28c30c202e678ab7723fff8d4e39b8816df59d8af" exitCode=0 Dec 02 15:40:21 crc kubenswrapper[4902]: I1202 15:40:21.372664 4902 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" event={"ID":"9ef80eb6-3980-4115-a3c1-956e37c6b70d","Type":"ContainerDied","Data":"f6986802e0ab253d79a663d28c30c202e678ab7723fff8d4e39b8816df59d8af"} Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.501521 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.539750 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-k4cr9"] Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.549126 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-k4cr9"] Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.639481 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5xmx\" (UniqueName: \"kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx\") pod \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.639629 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host\") pod \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\" (UID: \"9ef80eb6-3980-4115-a3c1-956e37c6b70d\") " Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.639771 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host" (OuterVolumeSpecName: "host") pod "9ef80eb6-3980-4115-a3c1-956e37c6b70d" (UID: "9ef80eb6-3980-4115-a3c1-956e37c6b70d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.640133 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9ef80eb6-3980-4115-a3c1-956e37c6b70d-host\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.644939 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx" (OuterVolumeSpecName: "kube-api-access-x5xmx") pod "9ef80eb6-3980-4115-a3c1-956e37c6b70d" (UID: "9ef80eb6-3980-4115-a3c1-956e37c6b70d"). InnerVolumeSpecName "kube-api-access-x5xmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:40:22 crc kubenswrapper[4902]: I1202 15:40:22.742183 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5xmx\" (UniqueName: \"kubernetes.io/projected/9ef80eb6-3980-4115-a3c1-956e37c6b70d-kube-api-access-x5xmx\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.119621 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ef80eb6-3980-4115-a3c1-956e37c6b70d" path="/var/lib/kubelet/pods/9ef80eb6-3980-4115-a3c1-956e37c6b70d/volumes" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.395474 4902 scope.go:117] "RemoveContainer" containerID="f6986802e0ab253d79a663d28c30c202e678ab7723fff8d4e39b8816df59d8af" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.395504 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-k4cr9" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.840306 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-4ccpn"] Dec 02 15:40:23 crc kubenswrapper[4902]: E1202 15:40:23.840749 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ef80eb6-3980-4115-a3c1-956e37c6b70d" containerName="container-00" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.840764 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ef80eb6-3980-4115-a3c1-956e37c6b70d" containerName="container-00" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.840991 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ef80eb6-3980-4115-a3c1-956e37c6b70d" containerName="container-00" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.841765 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.964438 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d69kh\" (UniqueName: \"kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:23 crc kubenswrapper[4902]: I1202 15:40:23.964503 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.065995 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d69kh\" (UniqueName: \"kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.066031 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.066208 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.085257 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d69kh\" (UniqueName: \"kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh\") pod \"crc-debug-4ccpn\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.141545 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:24 crc kubenswrapper[4902]: W1202 15:40:24.169571 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f9cca49_23f3_4db1_9108_27a26e72466f.slice/crio-8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1 WatchSource:0}: Error finding container 8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1: Status 404 returned error can't find the container with id 8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1 Dec 02 15:40:24 crc kubenswrapper[4902]: I1202 15:40:24.406663 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" event={"ID":"8f9cca49-23f3-4db1-9108-27a26e72466f","Type":"ContainerStarted","Data":"8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1"} Dec 02 15:40:25 crc kubenswrapper[4902]: I1202 15:40:25.425514 4902 generic.go:334] "Generic (PLEG): container finished" podID="8f9cca49-23f3-4db1-9108-27a26e72466f" containerID="bc8ae2d803da0f0ba5fd888a67397bbe45ebdd760eb375344e5a629f1bda195d" exitCode=0 Dec 02 15:40:25 crc kubenswrapper[4902]: I1202 15:40:25.425577 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" event={"ID":"8f9cca49-23f3-4db1-9108-27a26e72466f","Type":"ContainerDied","Data":"bc8ae2d803da0f0ba5fd888a67397bbe45ebdd760eb375344e5a629f1bda195d"} Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.539207 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.723626 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d69kh\" (UniqueName: \"kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh\") pod \"8f9cca49-23f3-4db1-9108-27a26e72466f\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.723706 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host\") pod \"8f9cca49-23f3-4db1-9108-27a26e72466f\" (UID: \"8f9cca49-23f3-4db1-9108-27a26e72466f\") " Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.724284 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host" (OuterVolumeSpecName: "host") pod "8f9cca49-23f3-4db1-9108-27a26e72466f" (UID: "8f9cca49-23f3-4db1-9108-27a26e72466f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.729070 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh" (OuterVolumeSpecName: "kube-api-access-d69kh") pod "8f9cca49-23f3-4db1-9108-27a26e72466f" (UID: "8f9cca49-23f3-4db1-9108-27a26e72466f"). InnerVolumeSpecName "kube-api-access-d69kh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.825684 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d69kh\" (UniqueName: \"kubernetes.io/projected/8f9cca49-23f3-4db1-9108-27a26e72466f-kube-api-access-d69kh\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:26 crc kubenswrapper[4902]: I1202 15:40:26.825724 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8f9cca49-23f3-4db1-9108-27a26e72466f-host\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:27 crc kubenswrapper[4902]: I1202 15:40:27.439787 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" event={"ID":"8f9cca49-23f3-4db1-9108-27a26e72466f","Type":"ContainerDied","Data":"8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1"} Dec 02 15:40:27 crc kubenswrapper[4902]: I1202 15:40:27.439825 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b4d28740ee9a99d7913826381f9f8c7f8e1153b7982c4f02dffe1574ad203f1" Dec 02 15:40:27 crc kubenswrapper[4902]: I1202 15:40:27.439850 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-4ccpn" Dec 02 15:40:27 crc kubenswrapper[4902]: I1202 15:40:27.631240 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-4ccpn"] Dec 02 15:40:27 crc kubenswrapper[4902]: I1202 15:40:27.638271 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-4ccpn"] Dec 02 15:40:28 crc kubenswrapper[4902]: I1202 15:40:28.940179 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-dqcdl"] Dec 02 15:40:28 crc kubenswrapper[4902]: E1202 15:40:28.940648 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f9cca49-23f3-4db1-9108-27a26e72466f" containerName="container-00" Dec 02 15:40:28 crc kubenswrapper[4902]: I1202 15:40:28.940663 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f9cca49-23f3-4db1-9108-27a26e72466f" containerName="container-00" Dec 02 15:40:28 crc kubenswrapper[4902]: I1202 15:40:28.940935 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f9cca49-23f3-4db1-9108-27a26e72466f" containerName="container-00" Dec 02 15:40:28 crc kubenswrapper[4902]: I1202 15:40:28.941803 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.092603 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdq2r\" (UniqueName: \"kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.092906 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.127350 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f9cca49-23f3-4db1-9108-27a26e72466f" path="/var/lib/kubelet/pods/8f9cca49-23f3-4db1-9108-27a26e72466f/volumes" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.197422 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.197519 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdq2r\" (UniqueName: \"kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.197628 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.222512 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdq2r\" (UniqueName: \"kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r\") pod \"crc-debug-dqcdl\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.258036 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:29 crc kubenswrapper[4902]: I1202 15:40:29.461188 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" event={"ID":"f1a69a76-ea22-40f9-a764-93999aaace9e","Type":"ContainerStarted","Data":"44d1ab2a153a4b5f27227d5799693b223cc29c9cb2d0b1df04e4871cda6c5935"} Dec 02 15:40:30 crc kubenswrapper[4902]: I1202 15:40:30.476961 4902 generic.go:334] "Generic (PLEG): container finished" podID="f1a69a76-ea22-40f9-a764-93999aaace9e" containerID="1524f6da77bcfca3a49229a49253a956d9b114354f0649a0ec7be83843d1239d" exitCode=0 Dec 02 15:40:30 crc kubenswrapper[4902]: I1202 15:40:30.477039 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" event={"ID":"f1a69a76-ea22-40f9-a764-93999aaace9e","Type":"ContainerDied","Data":"1524f6da77bcfca3a49229a49253a956d9b114354f0649a0ec7be83843d1239d"} Dec 02 15:40:30 crc kubenswrapper[4902]: I1202 15:40:30.526151 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-dqcdl"] Dec 02 15:40:30 crc kubenswrapper[4902]: I1202 15:40:30.538536 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-hvhq4/crc-debug-dqcdl"] Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.797966 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.962436 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdq2r\" (UniqueName: \"kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r\") pod \"f1a69a76-ea22-40f9-a764-93999aaace9e\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.962675 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host\") pod \"f1a69a76-ea22-40f9-a764-93999aaace9e\" (UID: \"f1a69a76-ea22-40f9-a764-93999aaace9e\") " Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.962812 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host" (OuterVolumeSpecName: "host") pod "f1a69a76-ea22-40f9-a764-93999aaace9e" (UID: "f1a69a76-ea22-40f9-a764-93999aaace9e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.965965 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1a69a76-ea22-40f9-a764-93999aaace9e-host\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:31 crc kubenswrapper[4902]: I1202 15:40:31.973761 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r" (OuterVolumeSpecName: "kube-api-access-vdq2r") pod "f1a69a76-ea22-40f9-a764-93999aaace9e" (UID: "f1a69a76-ea22-40f9-a764-93999aaace9e"). InnerVolumeSpecName "kube-api-access-vdq2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:40:32 crc kubenswrapper[4902]: I1202 15:40:32.068078 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdq2r\" (UniqueName: \"kubernetes.io/projected/f1a69a76-ea22-40f9-a764-93999aaace9e-kube-api-access-vdq2r\") on node \"crc\" DevicePath \"\"" Dec 02 15:40:32 crc kubenswrapper[4902]: I1202 15:40:32.696356 4902 scope.go:117] "RemoveContainer" containerID="1524f6da77bcfca3a49229a49253a956d9b114354f0649a0ec7be83843d1239d" Dec 02 15:40:32 crc kubenswrapper[4902]: I1202 15:40:32.696449 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/crc-debug-dqcdl" Dec 02 15:40:33 crc kubenswrapper[4902]: I1202 15:40:33.120448 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1a69a76-ea22-40f9-a764-93999aaace9e" path="/var/lib/kubelet/pods/f1a69a76-ea22-40f9-a764-93999aaace9e/volumes" Dec 02 15:40:34 crc kubenswrapper[4902]: I1202 15:40:34.731978 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:40:34 crc kubenswrapper[4902]: I1202 15:40:34.732667 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.599396 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54c8dd8948-wqzzm_eb085e9d-79c7-495c-b0b3-7fbe62e96cf7/barbican-api-log/0.log" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.636704 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54c8dd8948-wqzzm_eb085e9d-79c7-495c-b0b3-7fbe62e96cf7/barbican-api/0.log" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.752126 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7cb4b5cd86-6nl4d_1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64/barbican-keystone-listener/0.log" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.815332 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7cb4b5cd86-6nl4d_1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64/barbican-keystone-listener-log/0.log" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.905778 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d54dc89f-fxxfb_bea5327c-b120-4e9b-876c-94e46621dcad/barbican-worker/0.log" Dec 02 15:40:58 crc kubenswrapper[4902]: I1202 15:40:58.950135 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d54dc89f-fxxfb_bea5327c-b120-4e9b-876c-94e46621dcad/barbican-worker-log/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.155299 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b_0d54fbdd-0a68-449d-a2ef-1a4de4c25b02/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.201934 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/ceilometer-central-agent/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.294981 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/ceilometer-notification-agent/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.328590 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/sg-core/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.345402 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/proxy-httpd/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.533010 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e646a9bd-99e5-41c1-8187-076691cad16e/cinder-api/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.567056 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e646a9bd-99e5-41c1-8187-076691cad16e/cinder-api-log/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.662951 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b464cf62-dd2e-4885-9aeb-98c7da8d9e37/cinder-scheduler/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.773703 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b464cf62-dd2e-4885-9aeb-98c7da8d9e37/probe/0.log" Dec 02 15:40:59 crc kubenswrapper[4902]: I1202 15:40:59.841737 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2_f5c865c3-fd5d-4bc9-bf96-d1f57ff89203/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.006732 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5_c491ad4c-9428-41e3-8c4d-bcd59ff50ca3/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.053784 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/init/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.205750 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/init/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.315950 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp_359a7d26-5917-4e98-be9e-55f4702c2ac7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.352528 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/dnsmasq-dns/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.501171 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3d0b949e-6c1b-4c5c-955e-53e6ab278555/glance-log/0.log" Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.529695 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3d0b949e-6c1b-4c5c-955e-53e6ab278555/glance-httpd/0.log" Dec 02 
15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.669976 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e/glance-log/0.log"
Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.701613 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e/glance-httpd/0.log"
Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.859743 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-54c4bbdfbb-v8pjf_462ead25-02ec-4791-b927-56cf3f26ca39/horizon/0.log"
Dec 02 15:41:00 crc kubenswrapper[4902]: I1202 15:41:00.999238 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp_53ecd2a3-800b-4718-bf18-8b77eb9bbbe8/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.293607 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s9g5z_0cb0ec2d-c764-45bf-a7e7-02d3f75d2628/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.527188 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-54c4bbdfbb-v8pjf_462ead25-02ec-4791-b927-56cf3f26ca39/horizon-log/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.533074 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29411461-gmgwz_f2adf6c8-4207-43b6-9149-76b5e8e13c6a/keystone-cron/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.732661 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-867694b54b-2t5p7_1a591864-bb12-4847-ba0a-567637fbdaa9/keystone-api/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.795235 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl_197fff36-7bd5-46a0-a0e1-5b986b4cfc61/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 15:41:01 crc kubenswrapper[4902]: I1202 15:41:01.807515 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_8588d8af-b946-4d96-bd28-472301c12a7b/kube-state-metrics/0.log"
Dec 02 15:41:02 crc kubenswrapper[4902]: I1202 15:41:02.241173 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7bffdc6c65-srhkc_04c855a2-1a97-4b81-83c2-d3b51678293d/neutron-httpd/0.log"
Dec 02 15:41:02 crc kubenswrapper[4902]: I1202 15:41:02.242423 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7bffdc6c65-srhkc_04c855a2-1a97-4b81-83c2-d3b51678293d/neutron-api/0.log"
Dec 02 15:41:02 crc kubenswrapper[4902]: I1202 15:41:02.272768 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v_6b639ec2-25b0-47b7-8e8e-4ff7fa466e47/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 15:41:02 crc kubenswrapper[4902]: I1202 15:41:02.842857 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c866917d-0d79-4d60-8d9b-066986964749/nova-cell0-conductor-conductor/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.146158 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9eccfb03-21f3-4259-85bc-625274a3b3a8/nova-cell1-conductor-conductor/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.400777 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_abcdfeb8-008e-4d99-8860-179b489f7783/nova-cell1-novncproxy-novncproxy/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.468458 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_c8412330-d9d3-42bd-be8a-212966221fda/nova-api-log/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.618411 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-vmxms_61892f44-715d-453e-83fd-b62cd886d24e/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.739116 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50eb495f-ceaa-4583-b3d6-42ba67a92160/nova-metadata-log/0.log"
Dec 02 15:41:03 crc kubenswrapper[4902]: I1202 15:41:03.794257 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_c8412330-d9d3-42bd-be8a-212966221fda/nova-api-api/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.158045 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/mysql-bootstrap/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.341777 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_10a0095a-bf9d-45ec-9b8f-1b1543ed641c/nova-scheduler-scheduler/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.357254 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/galera/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.416796 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/mysql-bootstrap/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.574017 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/mysql-bootstrap/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.731078 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.731134 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.731179 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.732044 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.732126 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348" gracePeriod=600
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.785307 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/mysql-bootstrap/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.848973 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/galera/0.log"
Dec 02 15:41:04 crc kubenswrapper[4902]: I1202 15:41:04.991191 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1ac7fd15-4a88-4085-ae7c-8f646c29943f/openstackclient/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.073387 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348" exitCode=0
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.073431 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348"}
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.073465 4902 scope.go:117] "RemoveContainer" containerID="c0f19b45286edcce2a4756d9cae6f2c461eaa36a9b7f0cd0620d5d21e7a0abcc"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.102034 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-67clb_51ddf240-5cda-4f89-831f-4a20ce9997ed/ovn-controller/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.263843 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-68q42_a39354f8-da6e-43a2-9a3d-49e42af19590/openstack-network-exporter/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.493856 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server-init/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.646909 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovs-vswitchd/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.649436 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server-init/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.725333 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50eb495f-ceaa-4583-b3d6-42ba67a92160/nova-metadata-metadata/0.log"
Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.848118 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server/0.log"
path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server/0.log" Dec 02 15:41:05 crc kubenswrapper[4902]: I1202 15:41:05.908000 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qfls9_af830218-8303-46c8-a31a-a33fa89d0034/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:06 crc kubenswrapper[4902]: I1202 15:41:06.056458 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_412330a8-4fac-4700-a5f8-5054af56f44b/ovn-northd/0.log" Dec 02 15:41:06 crc kubenswrapper[4902]: I1202 15:41:06.056518 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_412330a8-4fac-4700-a5f8-5054af56f44b/openstack-network-exporter/0.log" Dec 02 15:41:06 crc kubenswrapper[4902]: I1202 15:41:06.084916 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"} Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.081245 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79db3800-d728-4ace-a34f-37d44c9c4892/ovsdbserver-nb/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.108288 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2eb37d9d-d6ca-4b36-a153-47d2b417e26b/openstack-network-exporter/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.135781 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79db3800-d728-4ace-a34f-37d44c9c4892/openstack-network-exporter/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.266639 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2eb37d9d-d6ca-4b36-a153-47d2b417e26b/ovsdbserver-sb/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.462410 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-786fb786b8-w8tbv_6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3/placement-api/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.550317 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/init-config-reloader/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.577029 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-786fb786b8-w8tbv_6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3/placement-log/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.739349 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/init-config-reloader/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.750505 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/prometheus/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.757023 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/config-reloader/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.813821 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/thanos-sidecar/0.log" Dec 02 15:41:07 crc kubenswrapper[4902]: I1202 15:41:07.949165 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/setup-container/0.log" Dec 02 15:41:08 crc kubenswrapper[4902]: I1202 15:41:08.601029 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/setup-container/0.log" Dec 02 15:41:08 crc kubenswrapper[4902]: I1202 15:41:08.610835 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/rabbitmq/0.log" Dec 02 15:41:08 crc kubenswrapper[4902]: I1202 15:41:08.681341 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/setup-container/0.log" Dec 02 15:41:08 crc kubenswrapper[4902]: I1202 15:41:08.853306 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/setup-container/0.log" Dec 02 15:41:08 crc kubenswrapper[4902]: I1202 15:41:08.954940 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf_0327d28d-2fe0-4940-b8b7-5a805b1d89bc/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.010577 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/rabbitmq/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.188384 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-v95mc_4c0e3211-2564-459f-b072-c6d07ac1da5c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.226054 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf_b851cac6-d9e2-4a83-92d8-dbd09ee7e38d/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.343361 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4zh2f_2c20b53d-a975-44ae-8ec3-126956995caf/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.495958 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-jgmdd_77add5e2-5a0c-4a99-a46e-80645017ee83/ssh-known-hosts-edpm-deployment/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.676520 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65c8455667-7wzb6_8804e694-63ac-4278-8672-56c862db1007/proxy-server/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.796046 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-4x7nc_3f809093-c39f-40fe-a785-69a1edd2bdf9/swift-ring-rebalance/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.808409 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65c8455667-7wzb6_8804e694-63ac-4278-8672-56c862db1007/proxy-httpd/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.953545 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-auditor/0.log" Dec 02 15:41:09 crc kubenswrapper[4902]: I1202 15:41:09.999070 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-reaper/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.097579 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-replicator/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.099793 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-server/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.213216 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-auditor/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.229023 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-replicator/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.322812 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-server/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.336642 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-updater/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.467860 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-expirer/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.488642 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-auditor/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.520708 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_954f9858-8849-4b60-a1dd-1fddd9aaf65e/memcached/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.526738 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-replicator/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.619277 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-server/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.628253 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-updater/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.681892 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/rsync/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.705546 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/swift-recon-cron/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.847586 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w_5bbd6a08-3913-4037-81e5-f7fd18479977/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.889673 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3469f18f-c530-4a00-91ba-95720c45b4c2/tempest-tests-tempest-tests-runner/0.log" Dec 02 15:41:10 crc kubenswrapper[4902]: I1202 15:41:10.955603 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d/test-operator-logs-container/0.log" Dec 02 15:41:11 crc kubenswrapper[4902]: I1202 15:41:11.063050 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4_10beb027-7c11-43eb-8d82-782f41f49b1b/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:41:11 crc kubenswrapper[4902]: I1202 15:41:11.629846 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_644d8761-1b3a-4a6f-922f-596ae390bacf/watcher-applier/0.log" Dec 02 15:41:12 crc kubenswrapper[4902]: I1202 15:41:12.103155 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_479a72ce-7b6b-4da9-8315-236e285a6680/watcher-api-log/0.log" Dec 02 15:41:12 crc kubenswrapper[4902]: I1202 15:41:12.490134 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_d5838a49-52eb-4485-8eda-14772a0f60bd/watcher-decision-engine/0.log" Dec 02 15:41:14 crc kubenswrapper[4902]: I1202 15:41:14.070821 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_479a72ce-7b6b-4da9-8315-236e285a6680/watcher-api/0.log" Dec 02 15:41:36 crc kubenswrapper[4902]: I1202 15:41:36.477647 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:41:36 crc kubenswrapper[4902]: I1202 15:41:36.609343 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:41:36 crc kubenswrapper[4902]: I1202 15:41:36.611172 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:41:36 crc kubenswrapper[4902]: I1202 15:41:36.682507 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.021965 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.048197 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.112113 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/extract/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.208010 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-jgk7w_ebf2d80f-02cb-4f53-a5b1-67280d3cd74b/kube-rbac-proxy/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.293929 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-jgk7w_ebf2d80f-02cb-4f53-a5b1-67280d3cd74b/manager/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.343231 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-j55rx_605097d2-e1a6-481d-91e2-fd3b638ad7b1/kube-rbac-proxy/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.452072 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-j55rx_605097d2-e1a6-481d-91e2-fd3b638ad7b1/manager/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.501586 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-v8th7_eb55d3d7-65ac-4f76-9cb1-210cff99a5bd/kube-rbac-proxy/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.580403 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-v8th7_eb55d3d7-65ac-4f76-9cb1-210cff99a5bd/manager/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.694396 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5w9nk_9f4cfa25-c827-4b4c-ab57-8dc0221d30b3/kube-rbac-proxy/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.783514 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5w9nk_9f4cfa25-c827-4b4c-ab57-8dc0221d30b3/manager/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.823715 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-9nw45_4ec2dc11-e22a-40c1-926b-d987e05b8d17/kube-rbac-proxy/0.log" Dec 02 15:41:37 crc kubenswrapper[4902]: I1202 15:41:37.919742 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-9nw45_4ec2dc11-e22a-40c1-926b-d987e05b8d17/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.040422 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q688c_3c045445-0a69-416e-a9c6-868843e4c3e9/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.100098 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q688c_3c045445-0a69-416e-a9c6-868843e4c3e9/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.201621 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bwpd5_9c65e1ba-dd1f-4d63-ae34-c18525c76bcf/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.316592 4902 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-wdt4v_16c02339-b964-4c10-8beb-32402af37c34/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.442785 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-wdt4v_16c02339-b964-4c10-8beb-32402af37c34/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.461304 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bwpd5_9c65e1ba-dd1f-4d63-ae34-c18525c76bcf/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.539436 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8bnj2_4d401ff5-aff3-4d1c-9fae-d56e1fea07f6/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.664840 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-xvrql_fd510a47-813b-4ec1-953b-cfefa2fb890f/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.704141 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8bnj2_4d401ff5-aff3-4d1c-9fae-d56e1fea07f6/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.771307 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-xvrql_fd510a47-813b-4ec1-953b-cfefa2fb890f/manager/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.896754 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-gnl4x_0d72b6fb-7ab2-4a48-bdca-17f2794daf3e/kube-rbac-proxy/0.log" Dec 02 15:41:38 crc kubenswrapper[4902]: I1202 15:41:38.913707 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-gnl4x_0d72b6fb-7ab2-4a48-bdca-17f2794daf3e/manager/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.071335 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-qh5qm_57034fef-009f-4593-92cb-67cdde94f9e0/kube-rbac-proxy/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.126540 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-qh5qm_57034fef-009f-4593-92cb-67cdde94f9e0/manager/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.224124 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-nhklg_5603e150-66d0-4016-a762-fd636f211c11/kube-rbac-proxy/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.358744 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-87z5d_a056d211-44a9-4585-bbf0-fc0413a57099/kube-rbac-proxy/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.377967 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-nhklg_5603e150-66d0-4016-a762-fd636f211c11/manager/0.log" Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 
Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.560357 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r_05625664-e0a8-4c8c-904e-69e4b3b7df9b/kube-rbac-proxy/0.log"
Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.588580 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r_05625664-e0a8-4c8c-904e-69e4b3b7df9b/manager/0.log"
Dec 02 15:41:39 crc kubenswrapper[4902]: I1202 15:41:39.946539 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fd5ddc88-7hv7g_4cdeb940-6336-43a7-bec3-bf831b83bce7/operator/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.024875 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-jw4ms_5a2a1e6b-0712-4b08-8dca-36da17c57d51/registry-server/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.192029 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pnphg_3838a55d-af10-4de8-ad85-bdbd8e49ed62/kube-rbac-proxy/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.323119 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pnphg_3838a55d-af10-4de8-ad85-bdbd8e49ed62/manager/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.386291 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-ghmml_437c097d-1c14-4668-a3c6-86802ed4a253/kube-rbac-proxy/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.461461 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-ghmml_437c097d-1c14-4668-a3c6-86802ed4a253/manager/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.607826 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-gnd6s_a4650041-03e0-4119-b353-281b9200355c/operator/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.742333 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cdphg_fc37cd76-791d-468a-9470-da5138c96d34/kube-rbac-proxy/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.875255 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cdphg_fc37cd76-791d-468a-9470-da5138c96d34/manager/0.log"
Dec 02 15:41:40 crc kubenswrapper[4902]: I1202 15:41:40.918238 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tjm2s_a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c/kube-rbac-proxy/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.114965 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhdgs_dd8c8a81-b5af-4a0c-8640-f4455c09abc1/kube-rbac-proxy/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.165938 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-575d4674bc-b4fl5_d319072a-bef8-4511-a876-dc7c6e59817e/manager/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.190592 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tjm2s_a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c/manager/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.248428 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhdgs_dd8c8a81-b5af-4a0c-8640-f4455c09abc1/manager/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.337817 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-64fcb99cdb-nkttd_0cbcce3d-353f-44fc-896e-75c3a8b58c21/kube-rbac-proxy/0.log"
Dec 02 15:41:41 crc kubenswrapper[4902]: I1202 15:41:41.439912 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-64fcb99cdb-nkttd_0cbcce3d-353f-44fc-896e-75c3a8b58c21/manager/0.log"
Dec 02 15:42:01 crc kubenswrapper[4902]: I1202 15:42:01.956244 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rxhgz_07de0872-46f2-4a69-af4c-a811e7ee3a8d/control-plane-machine-set-operator/0.log"
Dec 02 15:42:02 crc kubenswrapper[4902]: I1202 15:42:02.003744 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mxmxt_93abc37a-cc70-4b86-bd3f-4d3945d029f4/kube-rbac-proxy/0.log"
Dec 02 15:42:02 crc kubenswrapper[4902]: I1202 15:42:02.119965 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mxmxt_93abc37a-cc70-4b86-bd3f-4d3945d029f4/machine-api-operator/0.log"
Dec 02 15:42:15 crc kubenswrapper[4902]: I1202 15:42:15.365744 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-c8sjz_0c33663a-0d99-4a94-bb43-46f102098870/cert-manager-controller/0.log"
Dec 02 15:42:15 crc kubenswrapper[4902]: I1202 15:42:15.502443 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-bj9d5_c4d2337b-c3b1-4759-999d-ab47b754a817/cert-manager-cainjector/0.log"
Dec 02 15:42:15 crc kubenswrapper[4902]: I1202 15:42:15.565105 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wvs9t_1b016533-08b9-464a-8956-1c236d28e036/cert-manager-webhook/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.024386 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-7nbg8_6382b3e8-8f0e-421b-9612-0a398fd0f994/nmstate-console-plugin/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.188344 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-c2qjr_4d54d7c2-5d4a-41b5-9949-96b8ab11df5e/nmstate-handler/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.227611 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zvsm4_ad4e56ef-44e2-46c9-a609-808e4c96fa2e/kube-rbac-proxy/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.288645 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zvsm4_ad4e56ef-44e2-46c9-a609-808e4c96fa2e/nmstate-metrics/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.421520 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-hpnm5_8d059e00-4b87-410a-8613-a52e1263dc9f/nmstate-operator/0.log"
Dec 02 15:42:29 crc kubenswrapper[4902]: I1202 15:42:29.540581 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-8g56t_b31a81b9-0e67-4858-ae54-304c17fd0495/nmstate-webhook/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.028717 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fvt5m_673f9521-fb68-4ec8-9190-cf0315b14280/kube-rbac-proxy/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.172978 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fvt5m_673f9521-fb68-4ec8-9190-cf0315b14280/controller/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.244440 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.464927 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.465396 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.489733 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.496042 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.680726 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.692454 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.693855 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.733372 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.887949 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.907250 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log"
Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.918549 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log"
path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log" Dec 02 15:42:46 crc kubenswrapper[4902]: I1202 15:42:46.948004 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/controller/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.053690 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/frr-metrics/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.170935 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/kube-rbac-proxy/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.199766 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/kube-rbac-proxy-frr/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.302829 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/reloader/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.427373 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-p7rgx_e15d73a9-a10f-40b9-8a07-ae14a383d2ba/frr-k8s-webhook-server/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.633849 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-644fb8ffcc-frgm7_b75cd83c-20e7-42ea-a0f5-be6d28430a2e/manager/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.775393 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-56874848dd-bf6qw_25920d0f-ef42-4402-82a1-c643307041f5/webhook-server/0.log" Dec 02 15:42:47 crc kubenswrapper[4902]: I1202 15:42:47.981262 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rk7ql_4650d7ad-6b89-444d-a21d-3fad55e3a7b1/kube-rbac-proxy/0.log" Dec 02 15:42:48 crc kubenswrapper[4902]: I1202 15:42:48.452334 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rk7ql_4650d7ad-6b89-444d-a21d-3fad55e3a7b1/speaker/0.log" Dec 02 15:42:48 crc kubenswrapper[4902]: I1202 15:42:48.538133 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/frr/0.log" Dec 02 15:43:02 crc kubenswrapper[4902]: I1202 15:43:02.835318 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.012280 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.017889 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.061755 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.209402 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.213447 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/extract/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.236677 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.395007 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.570449 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.571399 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.588544 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.768507 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.781012 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.781694 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/extract/0.log" Dec 02 15:43:03 crc kubenswrapper[4902]: I1202 15:43:03.975307 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.139874 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.146404 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.197239 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.340472 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.372208 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/extract/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.405635 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.555510 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.713334 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.724261 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.726063 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.885312 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:43:04 crc kubenswrapper[4902]: I1202 15:43:04.895173 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.146122 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.292795 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.364266 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.372903 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.591293 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.634406 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.668637 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/registry-server/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.873222 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vp2bb_bfa7e05a-f673-4432-a9c5-e33f67589a10/marketplace-operator/0.log" Dec 02 15:43:05 crc kubenswrapper[4902]: I1202 15:43:05.992793 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.152484 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.209710 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.230914 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.371958 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/registry-server/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.429883 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.430902 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.565622 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-utilities/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.685512 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/registry-server/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.826356 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-content/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.827819 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-utilities/0.log" Dec 02 15:43:06 crc kubenswrapper[4902]: I1202 15:43:06.871586 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-content/0.log" Dec 02 15:43:07 crc kubenswrapper[4902]: I1202 15:43:07.046442 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-utilities/0.log" Dec 02 15:43:07 crc kubenswrapper[4902]: I1202 15:43:07.100434 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/extract-content/0.log" Dec 02 15:43:07 crc kubenswrapper[4902]: I1202 15:43:07.215839 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ktv4s_ac72e289-8af7-4534-a325-fddddca513cc/registry-server/0.log" Dec 02 15:43:21 crc kubenswrapper[4902]: I1202 15:43:21.277419 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-sbwhh_cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e/prometheus-operator/0.log" Dec 02 15:43:22 crc kubenswrapper[4902]: I1202 15:43:22.096890 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-698f669544-chdpg_8b03406b-b481-4193-b543-a1f91deefefd/prometheus-operator-admission-webhook/0.log" Dec 02 15:43:22 crc kubenswrapper[4902]: I1202 15:43:22.141340 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-698f669544-qdwg6_b411880c-1f14-41da-bbc5-85543ddf20d7/prometheus-operator-admission-webhook/0.log" Dec 02 15:43:22 crc kubenswrapper[4902]: I1202 15:43:22.292768 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-tghq9_c7fe3eed-6ecf-406a-9552-4f2a601eb860/operator/0.log" Dec 02 15:43:22 crc kubenswrapper[4902]: I1202 15:43:22.329395 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-hwvtn_f7d12c85-7736-436e-a273-03025b1fc05b/perses-operator/0.log" Dec 02 15:43:34 crc kubenswrapper[4902]: I1202 15:43:34.731970 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:43:34 crc kubenswrapper[4902]: I1202 15:43:34.732554 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:43:41 crc kubenswrapper[4902]: E1202 15:43:41.236832 4902 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.251:43730->38.102.83.251:38083: write tcp 38.102.83.251:43730->38.102.83.251:38083: write: broken pipe Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.758551 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"] Dec 02 15:43:59 
crc kubenswrapper[4902]: E1202 15:43:59.759755 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1a69a76-ea22-40f9-a764-93999aaace9e" containerName="container-00" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.759775 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1a69a76-ea22-40f9-a764-93999aaace9e" containerName="container-00" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.760081 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1a69a76-ea22-40f9-a764-93999aaace9e" containerName="container-00" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.761960 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.778888 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"] Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.807053 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.807311 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.807438 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv8bl\" (UniqueName: \"kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.909675 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv8bl\" (UniqueName: \"kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.909722 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.909793 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.910528 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.910525 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:43:59 crc kubenswrapper[4902]: I1202 15:43:59.931865 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv8bl\" (UniqueName: \"kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl\") pod \"redhat-marketplace-99chf\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") " pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.106599 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.631928 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"] Dec 02 15:44:00 crc kubenswrapper[4902]: W1202 15:44:00.639091 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod104ff275_5747_44f0_8262_792404a685f8.slice/crio-4df3fe62635a4a40b1237406be4d1c9aab2234b5c83c455764e369d0128659b1 WatchSource:0}: Error finding container 4df3fe62635a4a40b1237406be4d1c9aab2234b5c83c455764e369d0128659b1: Status 404 returned error can't find the container with id 4df3fe62635a4a40b1237406be4d1c9aab2234b5c83c455764e369d0128659b1 Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.943088 4902 generic.go:334] "Generic (PLEG): container finished" podID="104ff275-5747-44f0-8262-792404a685f8" containerID="061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f" exitCode=0 Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.943266 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerDied","Data":"061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f"} Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.943389 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerStarted","Data":"4df3fe62635a4a40b1237406be4d1c9aab2234b5c83c455764e369d0128659b1"} Dec 02 15:44:00 crc kubenswrapper[4902]: I1202 15:44:00.946081 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:44:02 crc kubenswrapper[4902]: I1202 15:44:02.961904 4902 generic.go:334] "Generic (PLEG): container finished" podID="104ff275-5747-44f0-8262-792404a685f8" containerID="664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f" exitCode=0 Dec 02 15:44:02 crc kubenswrapper[4902]: I1202 15:44:02.962479 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerDied","Data":"664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f"} Dec 02 
15:44:03 crc kubenswrapper[4902]: I1202 15:44:03.976121 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerStarted","Data":"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"} Dec 02 15:44:04 crc kubenswrapper[4902]: I1202 15:44:04.003409 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-99chf" podStartSLOduration=2.489366306 podStartE2EDuration="5.003384061s" podCreationTimestamp="2025-12-02 15:43:59 +0000 UTC" firstStartedPulling="2025-12-02 15:44:00.94578321 +0000 UTC m=+5272.137091919" lastFinishedPulling="2025-12-02 15:44:03.459800955 +0000 UTC m=+5274.651109674" observedRunningTime="2025-12-02 15:44:03.994129738 +0000 UTC m=+5275.185438457" watchObservedRunningTime="2025-12-02 15:44:04.003384061 +0000 UTC m=+5275.194692780" Dec 02 15:44:04 crc kubenswrapper[4902]: I1202 15:44:04.731672 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:44:04 crc kubenswrapper[4902]: I1202 15:44:04.732176 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:44:10 crc kubenswrapper[4902]: I1202 15:44:10.107497 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:10 crc kubenswrapper[4902]: I1202 15:44:10.108191 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:10 crc kubenswrapper[4902]: I1202 15:44:10.173259 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:11 crc kubenswrapper[4902]: I1202 15:44:11.096712 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-99chf" Dec 02 15:44:11 crc kubenswrapper[4902]: I1202 15:44:11.167963 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"] Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.069756 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-99chf" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="registry-server" containerID="cri-o://421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337" gracePeriod=2 Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.647030 4902 util.go:48] "No ready sandbox for pod can be found. 
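The "Observed pod startup duration" record above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (15:44:04.003384061 - 15:43:59 = 5.003384061s), and podStartSLOduration excludes the image-pull window, which is computed from the monotonic clock readings (the m=+... suffixes Go's time package appends) of firstStartedPulling and lastFinishedPulling. A back-of-the-envelope check in Go, using only numbers copied from the record (plain arithmetic, not kubelet source):

    package main

    import "fmt"

    // Numbers copied from the pod_startup_latency_tracker record for
    // redhat-marketplace-99chf; the pull window uses the monotonic (m=+...)
    // readings, which is why it differs from the wall-clock delta by ~10ns.
    func main() {
        const (
            e2e       = 5.003384061    // watchObservedRunningTime - podCreationTimestamp, seconds
            pullStart = 5272.137091919 // firstStartedPulling, monotonic reading, seconds
            pullEnd   = 5274.651109674 // lastFinishedPulling, monotonic reading, seconds
        )
        pull := pullEnd - pullStart
        fmt.Printf("image pull window:   %.9fs\n", pull)     // 2.514017755s
        fmt.Printf("podStartSLOduration: %.9fs\n", e2e-pull) // 2.489366306s, as logged
    }

The second such record further down (redhat-operators-q46bc) satisfies the same relation: 16.309557666 - (5381.661299905 - 5368.286131400) = 2.934389161.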
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.647030 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-99chf"
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.789463 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities\") pod \"104ff275-5747-44f0-8262-792404a685f8\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") "
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.789599 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content\") pod \"104ff275-5747-44f0-8262-792404a685f8\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") "
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.789706 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv8bl\" (UniqueName: \"kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl\") pod \"104ff275-5747-44f0-8262-792404a685f8\" (UID: \"104ff275-5747-44f0-8262-792404a685f8\") "
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.791516 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities" (OuterVolumeSpecName: "utilities") pod "104ff275-5747-44f0-8262-792404a685f8" (UID: "104ff275-5747-44f0-8262-792404a685f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.799868 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl" (OuterVolumeSpecName: "kube-api-access-fv8bl") pod "104ff275-5747-44f0-8262-792404a685f8" (UID: "104ff275-5747-44f0-8262-792404a685f8"). InnerVolumeSpecName "kube-api-access-fv8bl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.810510 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "104ff275-5747-44f0-8262-792404a685f8" (UID: "104ff275-5747-44f0-8262-792404a685f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.891986 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.892014 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ff275-5747-44f0-8262-792404a685f8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 15:44:13 crc kubenswrapper[4902]: I1202 15:44:13.892027 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv8bl\" (UniqueName: \"kubernetes.io/projected/104ff275-5747-44f0-8262-792404a685f8-kube-api-access-fv8bl\") on node \"crc\" DevicePath \"\""
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.084397 4902 generic.go:334] "Generic (PLEG): container finished" podID="104ff275-5747-44f0-8262-792404a685f8" containerID="421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337" exitCode=0
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.084437 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerDied","Data":"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"}
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.084546 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-99chf" event={"ID":"104ff275-5747-44f0-8262-792404a685f8","Type":"ContainerDied","Data":"4df3fe62635a4a40b1237406be4d1c9aab2234b5c83c455764e369d0128659b1"}
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.084604 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-99chf"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.084654 4902 scope.go:117] "RemoveContainer" containerID="421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.114327 4902 scope.go:117] "RemoveContainer" containerID="664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.151799 4902 scope.go:117] "RemoveContainer" containerID="061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.153135 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"]
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.175176 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-99chf"]
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.212812 4902 scope.go:117] "RemoveContainer" containerID="421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"
Dec 02 15:44:14 crc kubenswrapper[4902]: E1202 15:44:14.214118 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337\": container with ID starting with 421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337 not found: ID does not exist" containerID="421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.214200 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337"} err="failed to get container status \"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337\": rpc error: code = NotFound desc = could not find container \"421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337\": container with ID starting with 421e02a8ca5d725887eeff2edbc94cb154e86ef61dc1501fd93f480089c0d337 not found: ID does not exist"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.214423 4902 scope.go:117] "RemoveContainer" containerID="664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f"
Dec 02 15:44:14 crc kubenswrapper[4902]: E1202 15:44:14.214802 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f\": container with ID starting with 664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f not found: ID does not exist" containerID="664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.214835 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f"} err="failed to get container status \"664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f\": rpc error: code = NotFound desc = could not find container \"664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f\": container with ID starting with 664b5e8cdc4bb95b63115c1634408193a8076887b8a621e0e2df3b6cc953586f not found: ID does not exist"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.214856 4902 scope.go:117] "RemoveContainer" containerID="061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f"
Dec 02 15:44:14 crc kubenswrapper[4902]: E1202 15:44:14.215385 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f\": container with ID starting with 061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f not found: ID does not exist" containerID="061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f"
Dec 02 15:44:14 crc kubenswrapper[4902]: I1202 15:44:14.215414 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f"} err="failed to get container status \"061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f\": rpc error: code = NotFound desc = could not find container \"061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f\": container with ID starting with 061e907bfda0cd486492d8a33bfcd309f8042d8e42516939c89edaed4465266f not found: ID does not exist"
Dec 02 15:44:15 crc kubenswrapper[4902]: I1202 15:44:15.148162 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104ff275-5747-44f0-8262-792404a685f8" path="/var/lib/kubelet/pods/104ff275-5747-44f0-8262-792404a685f8/volumes"
Dec 02 15:44:34 crc kubenswrapper[4902]: I1202 15:44:34.731392 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 15:44:34 crc kubenswrapper[4902]: I1202 15:44:34.732012 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 15:44:34 crc kubenswrapper[4902]: I1202 15:44:34.732052 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 15:44:34 crc kubenswrapper[4902]: I1202 15:44:34.732848 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 15:44:34 crc kubenswrapper[4902]: I1202 15:44:34.732895 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" gracePeriod=600
pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:44:35 crc kubenswrapper[4902]: I1202 15:44:35.398641 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" exitCode=0 Dec 02 15:44:35 crc kubenswrapper[4902]: I1202 15:44:35.398728 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"} Dec 02 15:44:35 crc kubenswrapper[4902]: I1202 15:44:35.398788 4902 scope.go:117] "RemoveContainer" containerID="d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348" Dec 02 15:44:35 crc kubenswrapper[4902]: I1202 15:44:35.399798 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:44:35 crc kubenswrapper[4902]: E1202 15:44:35.400377 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:44:35 crc kubenswrapper[4902]: E1202 15:44:35.407018 4902 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/machine-config-daemon-v8znh_openshift-machine-config-operator_machine-config-daemon-d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348.log: no such file or directory" path="/var/log/containers/machine-config-daemon-v8znh_openshift-machine-config-operator_machine-config-daemon-d874727028053e419314c0c14db863d86b27cc896b75b0a0805a70ab67d01348.log" Dec 02 15:44:51 crc kubenswrapper[4902]: I1202 15:44:51.112525 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:44:51 crc kubenswrapper[4902]: E1202 15:44:51.113292 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.163637 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf"] Dec 02 15:45:00 crc kubenswrapper[4902]: E1202 15:45:00.164521 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="extract-content" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.164537 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="extract-content" Dec 02 15:45:00 crc kubenswrapper[4902]: E1202 15:45:00.164584 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="registry-server" Dec 
02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.164593 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="registry-server" Dec 02 15:45:00 crc kubenswrapper[4902]: E1202 15:45:00.164646 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="extract-utilities" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.164654 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="extract-utilities" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.164895 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="104ff275-5747-44f0-8262-792404a685f8" containerName="registry-server" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.165738 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.168257 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.169138 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.177907 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf"] Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.262822 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t785z\" (UniqueName: \"kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.263144 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.263162 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.365175 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t785z\" (UniqueName: \"kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.365255 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
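Every "back-off 5m0s" record above is the same CrashLoopBackOff cap being re-reported on each sync attempt: the kubelet doubles the restart delay after each crash, starting at 10s and capping at 5m, which is why the machine-config-daemon retries logged here never restart sooner. A sketch of that delay schedule, using the documented kubelet defaults as illustrative constants (not kubelet source):

    package main

    import (
        "fmt"
        "time"
    )

    // backoff returns the restart delay after a given number of crashes:
    // exponential doubling from an initial delay, capped at a maximum,
    // matching the "back-off 5m0s" cap reported throughout this log.
    func backoff(restarts int) time.Duration {
        const (
            initial = 10 * time.Second // kubelet's documented starting delay
            cap     = 5 * time.Minute  // the 5m0s cap seen in the records
        )
        d := initial
        for i := 0; i < restarts; i++ {
            d *= 2
            if d >= cap {
                return cap
            }
        }
        return d
    }

    func main() {
        for r := 0; r <= 6; r++ {
            fmt.Printf("restart %d -> wait %v\n", r, backoff(r)) // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s
        }
    }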
\"kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.365275 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.366278 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.373039 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.381717 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t785z\" (UniqueName: \"kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z\") pod \"collect-profiles-29411505-6qwmf\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:00 crc kubenswrapper[4902]: I1202 15:45:00.510937 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:01 crc kubenswrapper[4902]: I1202 15:45:01.010767 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf"] Dec 02 15:45:01 crc kubenswrapper[4902]: I1202 15:45:01.675895 4902 generic.go:334] "Generic (PLEG): container finished" podID="9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" containerID="755ee90ff23c9649f09db42c10a7b7409d6eba898a0bdbfb374e2828c450ae91" exitCode=0 Dec 02 15:45:01 crc kubenswrapper[4902]: I1202 15:45:01.676029 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" event={"ID":"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5","Type":"ContainerDied","Data":"755ee90ff23c9649f09db42c10a7b7409d6eba898a0bdbfb374e2828c450ae91"} Dec 02 15:45:01 crc kubenswrapper[4902]: I1202 15:45:01.677274 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" event={"ID":"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5","Type":"ContainerStarted","Data":"10a22bddf08d2e0193f751fd38793907ef555aa1da0bf13d420d1797f43c4d02"} Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.060496 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.231911 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume\") pod \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.232019 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume\") pod \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.232090 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t785z\" (UniqueName: \"kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z\") pod \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\" (UID: \"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5\") " Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.233197 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume" (OuterVolumeSpecName: "config-volume") pod "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" (UID: "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.234143 4902 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.237164 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" (UID: "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.258255 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z" (OuterVolumeSpecName: "kube-api-access-t785z") pod "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" (UID: "9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5"). InnerVolumeSpecName "kube-api-access-t785z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.336378 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t785z\" (UniqueName: \"kubernetes.io/projected/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-kube-api-access-t785z\") on node \"crc\" DevicePath \"\"" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.336431 4902 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.702146 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" event={"ID":"9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5","Type":"ContainerDied","Data":"10a22bddf08d2e0193f751fd38793907ef555aa1da0bf13d420d1797f43c4d02"} Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.702221 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10a22bddf08d2e0193f751fd38793907ef555aa1da0bf13d420d1797f43c4d02" Dec 02 15:45:03 crc kubenswrapper[4902]: I1202 15:45:03.702251 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411505-6qwmf" Dec 02 15:45:04 crc kubenswrapper[4902]: I1202 15:45:04.153802 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb"] Dec 02 15:45:04 crc kubenswrapper[4902]: I1202 15:45:04.164524 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411460-2r9xb"] Dec 02 15:45:05 crc kubenswrapper[4902]: I1202 15:45:05.122719 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f92b5dcc-2f9b-440a-8c19-adea872f9df9" path="/var/lib/kubelet/pods/f92b5dcc-2f9b-440a-8c19-adea872f9df9/volumes" Dec 02 15:45:06 crc kubenswrapper[4902]: I1202 15:45:06.107995 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:45:06 crc kubenswrapper[4902]: E1202 15:45:06.108680 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:45:15 crc kubenswrapper[4902]: I1202 15:45:15.853245 4902 generic.go:334] "Generic (PLEG): container finished" podID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerID="59b642074d8518f1443de6f27a8bc49ec090cc13fdc5003d26e6970fc3d73223" exitCode=0 Dec 02 15:45:15 crc kubenswrapper[4902]: I1202 15:45:15.853448 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" event={"ID":"c3ebb20e-c142-4cd0-842a-344552cc2014","Type":"ContainerDied","Data":"59b642074d8518f1443de6f27a8bc49ec090cc13fdc5003d26e6970fc3d73223"} Dec 02 15:45:15 crc kubenswrapper[4902]: I1202 15:45:15.856624 4902 scope.go:117] "RemoveContainer" containerID="59b642074d8518f1443de6f27a8bc49ec090cc13fdc5003d26e6970fc3d73223" Dec 02 15:45:15 crc kubenswrapper[4902]: I1202 15:45:15.968484 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-hvhq4_must-gather-z8rvc_c3ebb20e-c142-4cd0-842a-344552cc2014/gather/0.log" Dec 02 15:45:20 crc kubenswrapper[4902]: I1202 15:45:20.107512 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:45:20 crc kubenswrapper[4902]: E1202 15:45:20.111183 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:45:24 crc kubenswrapper[4902]: I1202 15:45:24.767331 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-hvhq4/must-gather-z8rvc"] Dec 02 15:45:24 crc kubenswrapper[4902]: I1202 15:45:24.768145 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="copy" containerID="cri-o://c0632413fea6c6015b05cf933b70e64360d5a24e0d695760ac2cc5f0d2117653" gracePeriod=2 Dec 02 15:45:24 crc kubenswrapper[4902]: I1202 15:45:24.782148 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-hvhq4/must-gather-z8rvc"] Dec 02 15:45:24 crc kubenswrapper[4902]: I1202 15:45:24.958783 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hvhq4_must-gather-z8rvc_c3ebb20e-c142-4cd0-842a-344552cc2014/copy/0.log" Dec 02 15:45:24 crc kubenswrapper[4902]: I1202 15:45:24.959494 4902 generic.go:334] "Generic (PLEG): container finished" podID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerID="c0632413fea6c6015b05cf933b70e64360d5a24e0d695760ac2cc5f0d2117653" exitCode=143 Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.188186 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hvhq4_must-gather-z8rvc_c3ebb20e-c142-4cd0-842a-344552cc2014/copy/0.log" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.188637 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.300484 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output\") pod \"c3ebb20e-c142-4cd0-842a-344552cc2014\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.300801 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx49s\" (UniqueName: \"kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s\") pod \"c3ebb20e-c142-4cd0-842a-344552cc2014\" (UID: \"c3ebb20e-c142-4cd0-842a-344552cc2014\") " Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.306486 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s" (OuterVolumeSpecName: "kube-api-access-gx49s") pod "c3ebb20e-c142-4cd0-842a-344552cc2014" (UID: "c3ebb20e-c142-4cd0-842a-344552cc2014"). InnerVolumeSpecName "kube-api-access-gx49s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.403971 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx49s\" (UniqueName: \"kubernetes.io/projected/c3ebb20e-c142-4cd0-842a-344552cc2014-kube-api-access-gx49s\") on node \"crc\" DevicePath \"\"" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.466804 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "c3ebb20e-c142-4cd0-842a-344552cc2014" (UID: "c3ebb20e-c142-4cd0-842a-344552cc2014"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.507057 4902 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c3ebb20e-c142-4cd0-842a-344552cc2014-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.970196 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hvhq4_must-gather-z8rvc_c3ebb20e-c142-4cd0-842a-344552cc2014/copy/0.log" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.970843 4902 scope.go:117] "RemoveContainer" containerID="c0632413fea6c6015b05cf933b70e64360d5a24e0d695760ac2cc5f0d2117653" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.970905 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hvhq4/must-gather-z8rvc" Dec 02 15:45:25 crc kubenswrapper[4902]: I1202 15:45:25.995156 4902 scope.go:117] "RemoveContainer" containerID="59b642074d8518f1443de6f27a8bc49ec090cc13fdc5003d26e6970fc3d73223" Dec 02 15:45:27 crc kubenswrapper[4902]: I1202 15:45:27.118278 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" path="/var/lib/kubelet/pods/c3ebb20e-c142-4cd0-842a-344552cc2014/volumes" Dec 02 15:45:32 crc kubenswrapper[4902]: I1202 15:45:32.845625 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bwpd5" podUID="9c65e1ba-dd1f-4d63-ae34-c18525c76bcf" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.83:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 15:45:34 crc kubenswrapper[4902]: I1202 15:45:34.106679 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:45:34 crc kubenswrapper[4902]: E1202 15:45:34.107687 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.414844 4902 scope.go:117] "RemoveContainer" containerID="4ae679602af91e76f65d50416b442f1ab633cd92983ad6947a5890d2e334db72" Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.842718 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q46bc"] Dec 02 15:45:35 crc 
Dec 02 15:45:35 crc kubenswrapper[4902]: E1202 15:45:35.843500 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" containerName="collect-profiles"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843515 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" containerName="collect-profiles"
Dec 02 15:45:35 crc kubenswrapper[4902]: E1202 15:45:35.843535 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="copy"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843544 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="copy"
Dec 02 15:45:35 crc kubenswrapper[4902]: E1202 15:45:35.843590 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="gather"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843601 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="gather"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843852 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="copy"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843878 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ebb20e-c142-4cd0-842a-344552cc2014" containerName="gather"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.843892 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="9518f70e-9ce2-4a9d-ab12-26e7ecd30fd5" containerName="collect-profiles"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.846718 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.888758 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q46bc"]
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.930615 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xg8t\" (UniqueName: \"kubernetes.io/projected/da31f203-4e88-423f-b7c4-320cbd35f00f-kube-api-access-7xg8t\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.930723 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-utilities\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:35 crc kubenswrapper[4902]: I1202 15:45:35.930915 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-catalog-content\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.033460 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xg8t\" (UniqueName: \"kubernetes.io/projected/da31f203-4e88-423f-b7c4-320cbd35f00f-kube-api-access-7xg8t\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.033529 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-utilities\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.033609 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-catalog-content\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.034032 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-catalog-content\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.034377 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da31f203-4e88-423f-b7c4-320cbd35f00f-utilities\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.054613 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xg8t\" (UniqueName: \"kubernetes.io/projected/da31f203-4e88-423f-b7c4-320cbd35f00f-kube-api-access-7xg8t\") pod \"redhat-operators-q46bc\" (UID: \"da31f203-4e88-423f-b7c4-320cbd35f00f\") " pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.202607 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q46bc"
Dec 02 15:45:36 crc kubenswrapper[4902]: I1202 15:45:36.660400 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q46bc"]
Dec 02 15:45:37 crc kubenswrapper[4902]: I1202 15:45:37.092522 4902 generic.go:334] "Generic (PLEG): container finished" podID="da31f203-4e88-423f-b7c4-320cbd35f00f" containerID="e4debcb3eb15c8272930a2b89c854244b34a622a9fc913a43b2149453aa85b96" exitCode=0
Dec 02 15:45:37 crc kubenswrapper[4902]: I1202 15:45:37.092587 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q46bc" event={"ID":"da31f203-4e88-423f-b7c4-320cbd35f00f","Type":"ContainerDied","Data":"e4debcb3eb15c8272930a2b89c854244b34a622a9fc913a43b2149453aa85b96"}
Dec 02 15:45:37 crc kubenswrapper[4902]: I1202 15:45:37.092617 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q46bc" event={"ID":"da31f203-4e88-423f-b7c4-320cbd35f00f","Type":"ContainerStarted","Data":"40da3520642aa8a0838d387940884b62bdcf438fab6a400893c961462e4bae40"}
Dec 02 15:45:45 crc kubenswrapper[4902]: I1202 15:45:45.189979 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q46bc" event={"ID":"da31f203-4e88-423f-b7c4-320cbd35f00f","Type":"ContainerStarted","Data":"e7737b69385b4b42546189f98c9bcb3a062f073c7de29d2d4fcfdf15daa1c438"}
Dec 02 15:45:47 crc kubenswrapper[4902]: I1202 15:45:47.211432 4902 generic.go:334] "Generic (PLEG): container finished" podID="da31f203-4e88-423f-b7c4-320cbd35f00f" containerID="e7737b69385b4b42546189f98c9bcb3a062f073c7de29d2d4fcfdf15daa1c438" exitCode=0
Dec 02 15:45:47 crc kubenswrapper[4902]: I1202 15:45:47.211488 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q46bc" event={"ID":"da31f203-4e88-423f-b7c4-320cbd35f00f","Type":"ContainerDied","Data":"e7737b69385b4b42546189f98c9bcb3a062f073c7de29d2d4fcfdf15daa1c438"}
Dec 02 15:45:49 crc kubenswrapper[4902]: I1202 15:45:49.112751 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:45:49 crc kubenswrapper[4902]: E1202 15:45:49.113316 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:45:51 crc kubenswrapper[4902]: I1202 15:45:51.251914 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q46bc" event={"ID":"da31f203-4e88-423f-b7c4-320cbd35f00f","Type":"ContainerStarted","Data":"7f82f32f6d7564ab934582bdf232c85800a5ada5e70ba6ead61edc8e1eca0175"}
pod="openshift-marketplace/redhat-operators-q46bc" podStartSLOduration=2.934389161 podStartE2EDuration="16.309557666s" podCreationTimestamp="2025-12-02 15:45:35 +0000 UTC" firstStartedPulling="2025-12-02 15:45:37.094822691 +0000 UTC m=+5368.286131400" lastFinishedPulling="2025-12-02 15:45:50.469991196 +0000 UTC m=+5381.661299905" observedRunningTime="2025-12-02 15:45:51.29348736 +0000 UTC m=+5382.484796069" watchObservedRunningTime="2025-12-02 15:45:51.309557666 +0000 UTC m=+5382.500866375" Dec 02 15:45:56 crc kubenswrapper[4902]: I1202 15:45:56.203769 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q46bc" Dec 02 15:45:56 crc kubenswrapper[4902]: I1202 15:45:56.204345 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q46bc" Dec 02 15:45:57 crc kubenswrapper[4902]: I1202 15:45:57.256828 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-q46bc" podUID="da31f203-4e88-423f-b7c4-320cbd35f00f" containerName="registry-server" probeResult="failure" output=< Dec 02 15:45:57 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 15:45:57 crc kubenswrapper[4902]: > Dec 02 15:46:03 crc kubenswrapper[4902]: I1202 15:46:03.107502 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:46:03 crc kubenswrapper[4902]: E1202 15:46:03.108226 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:46:06 crc kubenswrapper[4902]: I1202 15:46:06.297061 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q46bc" Dec 02 15:46:06 crc kubenswrapper[4902]: I1202 15:46:06.378957 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q46bc" Dec 02 15:46:06 crc kubenswrapper[4902]: I1202 15:46:06.888744 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q46bc"] Dec 02 15:46:07 crc kubenswrapper[4902]: I1202 15:46:07.039493 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"] Dec 02 15:46:07 crc kubenswrapper[4902]: I1202 15:46:07.039813 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ktv4s" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="registry-server" containerID="cri-o://ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16" gracePeriod=2 Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.380255 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.434041 4902 generic.go:334] "Generic (PLEG): container finished" podID="ac72e289-8af7-4534-a325-fddddca513cc" containerID="ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16" exitCode=0
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.434361 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerDied","Data":"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"}
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.434389 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktv4s" event={"ID":"ac72e289-8af7-4534-a325-fddddca513cc","Type":"ContainerDied","Data":"a5a5b63ee2b1e4b8f2a255ece6e5131f08a8e9883826556f09a25f54dfbb449e"}
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.434405 4902 scope.go:117] "RemoveContainer" containerID="ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.434531 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktv4s"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.453697 4902 scope.go:117] "RemoveContainer" containerID="44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.468281 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mp85\" (UniqueName: \"kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85\") pod \"ac72e289-8af7-4534-a325-fddddca513cc\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") "
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.469096 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities\") pod \"ac72e289-8af7-4534-a325-fddddca513cc\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") "
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.469416 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content\") pod \"ac72e289-8af7-4534-a325-fddddca513cc\" (UID: \"ac72e289-8af7-4534-a325-fddddca513cc\") "
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.469429 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities" (OuterVolumeSpecName: "utilities") pod "ac72e289-8af7-4534-a325-fddddca513cc" (UID: "ac72e289-8af7-4534-a325-fddddca513cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.470252 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.473908 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85" (OuterVolumeSpecName: "kube-api-access-5mp85") pod "ac72e289-8af7-4534-a325-fddddca513cc" (UID: "ac72e289-8af7-4534-a325-fddddca513cc"). InnerVolumeSpecName "kube-api-access-5mp85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.482381 4902 scope.go:117] "RemoveContainer" containerID="aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.565799 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac72e289-8af7-4534-a325-fddddca513cc" (UID: "ac72e289-8af7-4534-a325-fddddca513cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.573819 4902 scope.go:117] "RemoveContainer" containerID="ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.574079 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac72e289-8af7-4534-a325-fddddca513cc-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.574136 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mp85\" (UniqueName: \"kubernetes.io/projected/ac72e289-8af7-4534-a325-fddddca513cc-kube-api-access-5mp85\") on node \"crc\" DevicePath \"\""
Dec 02 15:46:08 crc kubenswrapper[4902]: E1202 15:46:08.574375 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16\": container with ID starting with ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16 not found: ID does not exist" containerID="ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.574422 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16"} err="failed to get container status \"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16\": rpc error: code = NotFound desc = could not find container \"ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16\": container with ID starting with ca1dd27d9173b1846a2d47fbb9ad3d0be0dbec390ca6c07b1fe249f87b543b16 not found: ID does not exist"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.574454 4902 scope.go:117] "RemoveContainer" containerID="44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"
Dec 02 15:46:08 crc kubenswrapper[4902]: E1202 15:46:08.575911 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91\": container with ID starting with 44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91 not found: ID does not exist" containerID="44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.575945 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91"} err="failed to get container status \"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91\": rpc error: code = NotFound desc = could not find container \"44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91\": container with ID starting with 44312ed7c5be567a2be50669b64c12596c145acf4fdc365a6efac3596110dc91 not found: ID does not exist"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.575967 4902 scope.go:117] "RemoveContainer" containerID="aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9"
Dec 02 15:46:08 crc kubenswrapper[4902]: E1202 15:46:08.576730 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9\": container with ID starting with aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9 not found: ID does not exist" containerID="aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.576759 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9"} err="failed to get container status \"aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9\": rpc error: code = NotFound desc = could not find container \"aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9\": container with ID starting with aaee14a15df1d2786eb09ec8e9f2e79bb38a67c8d3b8d09efcb127bd6913f5f9 not found: ID does not exist"
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.768011 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"]
Dec 02 15:46:08 crc kubenswrapper[4902]: I1202 15:46:08.775611 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ktv4s"]
Dec 02 15:46:09 crc kubenswrapper[4902]: I1202 15:46:09.119086 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac72e289-8af7-4534-a325-fddddca513cc" path="/var/lib/kubelet/pods/ac72e289-8af7-4534-a325-fddddca513cc/volumes"
Dec 02 15:46:17 crc kubenswrapper[4902]: I1202 15:46:17.107121 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:46:17 crc kubenswrapper[4902]: E1202 15:46:17.107820 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:46:32 crc kubenswrapper[4902]: I1202 15:46:32.107271 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:46:32 crc kubenswrapper[4902]: E1202 15:46:32.108400 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:46:35 crc kubenswrapper[4902]: I1202 15:46:35.513716 4902 scope.go:117] "RemoveContainer" containerID="bc8ae2d803da0f0ba5fd888a67397bbe45ebdd760eb375344e5a629f1bda195d"
Dec 02 15:46:43 crc kubenswrapper[4902]: I1202 15:46:43.107235 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:46:43 crc kubenswrapper[4902]: E1202 15:46:43.108007 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:46:56 crc kubenswrapper[4902]: I1202 15:46:56.107193 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:46:56 crc kubenswrapper[4902]: E1202 15:46:56.108582 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:47:07 crc kubenswrapper[4902]: I1202 15:47:07.107392 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:47:07 crc kubenswrapper[4902]: E1202 15:47:07.108073 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:47:22 crc kubenswrapper[4902]: I1202 15:47:22.107349 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:47:22 crc kubenswrapper[4902]: E1202 15:47:22.108713 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:47:36 crc kubenswrapper[4902]: I1202 15:47:36.107412 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:47:36 crc kubenswrapper[4902]: E1202 15:47:36.108543 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:47:47 crc kubenswrapper[4902]: I1202 15:47:47.106995 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:47:47 crc kubenswrapper[4902]: E1202 15:47:47.107992 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:02 crc kubenswrapper[4902]: I1202 15:48:02.106601 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:48:02 crc kubenswrapper[4902]: E1202 15:48:02.108139 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:15 crc kubenswrapper[4902]: I1202 15:48:15.107050 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:48:15 crc kubenswrapper[4902]: E1202 15:48:15.108704 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:26 crc kubenswrapper[4902]: I1202 15:48:26.106898 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:48:26 crc kubenswrapper[4902]: E1202 15:48:26.107810 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.096881 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9kwz5/must-gather-9whft"]
Dec 02 15:48:33 crc kubenswrapper[4902]: E1202 15:48:33.101301 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="extract-utilities"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.101325 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="extract-utilities"
Dec 02 15:48:33 crc kubenswrapper[4902]: E1202 15:48:33.101375 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="extract-content"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.101383 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="extract-content"
Dec 02 15:48:33 crc kubenswrapper[4902]: E1202 15:48:33.101405 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="registry-server"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.101413 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="registry-server"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.101861 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac72e289-8af7-4534-a325-fddddca513cc" containerName="registry-server"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.103400 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.105706 4902 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9kwz5"/"default-dockercfg-zvdb9"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.106000 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9kwz5"/"openshift-service-ca.crt"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.113491 4902 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9kwz5"/"kube-root-ca.crt"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.146178 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9kwz5/must-gather-9whft"]
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.211139 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.211197 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rshrt\" (UniqueName: \"kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.313375 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.313429 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rshrt\" (UniqueName: \"kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.313818 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.347109 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rshrt\" (UniqueName: \"kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt\") pod \"must-gather-9whft\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:33 crc kubenswrapper[4902]: I1202 15:48:33.424227 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/must-gather-9whft"
Dec 02 15:48:34 crc kubenswrapper[4902]: I1202 15:48:34.008243 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9kwz5/must-gather-9whft"]
Dec 02 15:48:34 crc kubenswrapper[4902]: W1202 15:48:34.010420 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod910db312_d28e_43b2_b759_88886bd7d7a7.slice/crio-3ef5fc178e71af08d1ed4cafa08461e795d2d9186b29fd5134679b8c554b122e WatchSource:0}: Error finding container 3ef5fc178e71af08d1ed4cafa08461e795d2d9186b29fd5134679b8c554b122e: Status 404 returned error can't find the container with id 3ef5fc178e71af08d1ed4cafa08461e795d2d9186b29fd5134679b8c554b122e
Dec 02 15:48:34 crc kubenswrapper[4902]: I1202 15:48:34.165809 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/must-gather-9whft" event={"ID":"910db312-d28e-43b2-b759-88886bd7d7a7","Type":"ContainerStarted","Data":"3ef5fc178e71af08d1ed4cafa08461e795d2d9186b29fd5134679b8c554b122e"}
Dec 02 15:48:35 crc kubenswrapper[4902]: I1202 15:48:35.174464 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/must-gather-9whft" event={"ID":"910db312-d28e-43b2-b759-88886bd7d7a7","Type":"ContainerStarted","Data":"d755351f80049ec6e022c1ec6f603ed1ab557d0f79ddbd3642fb2dbf2512ff9d"}
Dec 02 15:48:35 crc kubenswrapper[4902]: I1202 15:48:35.174997 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/must-gather-9whft" event={"ID":"910db312-d28e-43b2-b759-88886bd7d7a7","Type":"ContainerStarted","Data":"3cc83d93c0d1b184af830e105f1a01111ad3aa89b9057baa63b8515aa64b70b1"}
Dec 02 15:48:35 crc kubenswrapper[4902]: I1202 15:48:35.202176 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9kwz5/must-gather-9whft" podStartSLOduration=2.2021496210000002 podStartE2EDuration="2.202149621s" podCreationTimestamp="2025-12-02 15:48:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 15:48:35.190044287 +0000 UTC m=+5546.381352996" watchObservedRunningTime="2025-12-02 15:48:35.202149621 +0000 UTC m=+5546.393458360"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.639927 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-txqzk"]
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.641714 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.720920 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkp6r\" (UniqueName: \"kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.721005 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.828376 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkp6r\" (UniqueName: \"kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.828555 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.829218 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.859444 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkp6r\" (UniqueName: \"kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r\") pod \"crc-debug-txqzk\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") " pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:38 crc kubenswrapper[4902]: I1202 15:48:38.964038 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:48:39 crc kubenswrapper[4902]: I1202 15:48:39.225606 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-txqzk" event={"ID":"38dfea75-55ee-4dd7-abfe-f28555b9cfd3","Type":"ContainerStarted","Data":"80affc45ab767cbeb9320d9cc84e4ff1f72761fa2f8f1a9c6345674fb3ed2741"}
Dec 02 15:48:40 crc kubenswrapper[4902]: I1202 15:48:40.234992 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-txqzk" event={"ID":"38dfea75-55ee-4dd7-abfe-f28555b9cfd3","Type":"ContainerStarted","Data":"6ea99b5a2ce5431dc8cd2d36fbec85e8d7b4e58bec8026e9cee3d3d936bd3b3c"}
Dec 02 15:48:40 crc kubenswrapper[4902]: I1202 15:48:40.273403 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9kwz5/crc-debug-txqzk" podStartSLOduration=2.273383646 podStartE2EDuration="2.273383646s" podCreationTimestamp="2025-12-02 15:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 15:48:40.265870913 +0000 UTC m=+5551.457179622" watchObservedRunningTime="2025-12-02 15:48:40.273383646 +0000 UTC m=+5551.464692355"
Dec 02 15:48:41 crc kubenswrapper[4902]: I1202 15:48:41.106863 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:48:41 crc kubenswrapper[4902]: E1202 15:48:41.107356 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.085103 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.088713 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.101028 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.125322 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.125375 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g88wk\" (UniqueName: \"kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.125479 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.226858 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.226907 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g88wk\" (UniqueName: \"kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.226983 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.227676 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.227790 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.259758 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g88wk\" (UniqueName: \"kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk\") pod \"certified-operators-5xjj4\" (UID: \"60f87847-4702-4965-b314-8daada381315\") " pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.410292 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:48:51 crc kubenswrapper[4902]: I1202 15:48:51.997243 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:48:53 crc kubenswrapper[4902]: I1202 15:48:53.357908 4902 generic.go:334] "Generic (PLEG): container finished" podID="60f87847-4702-4965-b314-8daada381315" containerID="d93f1bbe1a15cd44ac9795e421e0496e16967903569ec0f46bd0a1dcfbe2cfcc" exitCode=0
Dec 02 15:48:53 crc kubenswrapper[4902]: I1202 15:48:53.357972 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerDied","Data":"d93f1bbe1a15cd44ac9795e421e0496e16967903569ec0f46bd0a1dcfbe2cfcc"}
Dec 02 15:48:53 crc kubenswrapper[4902]: I1202 15:48:53.358300 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerStarted","Data":"25c2b0e6cdd4a52a275040ad24d4ced665bb8726b2a3fa976f2efa2822c4b9b0"}
Dec 02 15:48:55 crc kubenswrapper[4902]: I1202 15:48:55.106551 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:48:55 crc kubenswrapper[4902]: E1202 15:48:55.107457 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:48:55 crc kubenswrapper[4902]: I1202 15:48:55.383981 4902 generic.go:334] "Generic (PLEG): container finished" podID="60f87847-4702-4965-b314-8daada381315" containerID="28a5ab8d8f5768938b6e939bedb01456f1d0be7f6953e97bfa8e797c8d2bdfda" exitCode=0
Dec 02 15:48:55 crc kubenswrapper[4902]: I1202 15:48:55.384026 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerDied","Data":"28a5ab8d8f5768938b6e939bedb01456f1d0be7f6953e97bfa8e797c8d2bdfda"}
Dec 02 15:48:57 crc kubenswrapper[4902]: I1202 15:48:57.402392 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerStarted","Data":"36c85a376be5d74bc1482d87a4b059939d5f86f6d035b88cb306b14038bc3e02"}
Dec 02 15:48:57 crc kubenswrapper[4902]: I1202 15:48:57.425216 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5xjj4" podStartSLOduration=3.251962014 podStartE2EDuration="6.425190494s" podCreationTimestamp="2025-12-02 15:48:51 +0000 UTC" firstStartedPulling="2025-12-02 15:48:53.361826831 +0000 UTC m=+5564.553135540" lastFinishedPulling="2025-12-02 15:48:56.535055301 +0000 UTC m=+5567.726364020" observedRunningTime="2025-12-02 15:48:57.419276426 +0000 UTC m=+5568.610585135" watchObservedRunningTime="2025-12-02 15:48:57.425190494 +0000 UTC m=+5568.616499203"
Dec 02 15:49:01 crc kubenswrapper[4902]: I1202 15:49:01.410528 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:01 crc kubenswrapper[4902]: I1202 15:49:01.411064 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:01 crc kubenswrapper[4902]: I1202 15:49:01.465676 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:01 crc kubenswrapper[4902]: I1202 15:49:01.511831 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:01 crc kubenswrapper[4902]: I1202 15:49:01.701897 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:49:03 crc kubenswrapper[4902]: I1202 15:49:03.461043 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5xjj4" podUID="60f87847-4702-4965-b314-8daada381315" containerName="registry-server" containerID="cri-o://36c85a376be5d74bc1482d87a4b059939d5f86f6d035b88cb306b14038bc3e02" gracePeriod=2
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.471543 4902 generic.go:334] "Generic (PLEG): container finished" podID="60f87847-4702-4965-b314-8daada381315" containerID="36c85a376be5d74bc1482d87a4b059939d5f86f6d035b88cb306b14038bc3e02" exitCode=0
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.471877 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerDied","Data":"36c85a376be5d74bc1482d87a4b059939d5f86f6d035b88cb306b14038bc3e02"}
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.471913 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xjj4" event={"ID":"60f87847-4702-4965-b314-8daada381315","Type":"ContainerDied","Data":"25c2b0e6cdd4a52a275040ad24d4ced665bb8726b2a3fa976f2efa2822c4b9b0"}
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.471928 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25c2b0e6cdd4a52a275040ad24d4ced665bb8726b2a3fa976f2efa2822c4b9b0"
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.628042 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.713230 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities\") pod \"60f87847-4702-4965-b314-8daada381315\" (UID: \"60f87847-4702-4965-b314-8daada381315\") "
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.713803 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g88wk\" (UniqueName: \"kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk\") pod \"60f87847-4702-4965-b314-8daada381315\" (UID: \"60f87847-4702-4965-b314-8daada381315\") "
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.713832 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content\") pod \"60f87847-4702-4965-b314-8daada381315\" (UID: \"60f87847-4702-4965-b314-8daada381315\") "
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.714576 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities" (OuterVolumeSpecName: "utilities") pod "60f87847-4702-4965-b314-8daada381315" (UID: "60f87847-4702-4965-b314-8daada381315"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.720353 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk" (OuterVolumeSpecName: "kube-api-access-g88wk") pod "60f87847-4702-4965-b314-8daada381315" (UID: "60f87847-4702-4965-b314-8daada381315"). InnerVolumeSpecName "kube-api-access-g88wk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.779728 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60f87847-4702-4965-b314-8daada381315" (UID: "60f87847-4702-4965-b314-8daada381315"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.815872 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g88wk\" (UniqueName: \"kubernetes.io/projected/60f87847-4702-4965-b314-8daada381315-kube-api-access-g88wk\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.815911 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:04 crc kubenswrapper[4902]: I1202 15:49:04.815924 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60f87847-4702-4965-b314-8daada381315-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:05 crc kubenswrapper[4902]: I1202 15:49:05.479082 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xjj4"
Dec 02 15:49:05 crc kubenswrapper[4902]: I1202 15:49:05.499983 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:49:05 crc kubenswrapper[4902]: I1202 15:49:05.508854 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5xjj4"]
Dec 02 15:49:07 crc kubenswrapper[4902]: I1202 15:49:07.108091 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:49:07 crc kubenswrapper[4902]: E1202 15:49:07.108825 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:49:07 crc kubenswrapper[4902]: I1202 15:49:07.118728 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60f87847-4702-4965-b314-8daada381315" path="/var/lib/kubelet/pods/60f87847-4702-4965-b314-8daada381315/volumes"
Dec 02 15:49:21 crc kubenswrapper[4902]: I1202 15:49:21.106320 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905"
Dec 02 15:49:21 crc kubenswrapper[4902]: E1202 15:49:21.107008 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:49:22 crc kubenswrapper[4902]: I1202 15:49:22.660017 4902 generic.go:334] "Generic (PLEG): container finished" podID="38dfea75-55ee-4dd7-abfe-f28555b9cfd3" containerID="6ea99b5a2ce5431dc8cd2d36fbec85e8d7b4e58bec8026e9cee3d3d936bd3b3c" exitCode=0
Dec 02 15:49:22 crc kubenswrapper[4902]: I1202 15:49:22.660084 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-txqzk" event={"ID":"38dfea75-55ee-4dd7-abfe-f28555b9cfd3","Type":"ContainerDied","Data":"6ea99b5a2ce5431dc8cd2d36fbec85e8d7b4e58bec8026e9cee3d3d936bd3b3c"}
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.811166 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.818451 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host\") pod \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") "
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.818647 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkp6r\" (UniqueName: \"kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r\") pod \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\" (UID: \"38dfea75-55ee-4dd7-abfe-f28555b9cfd3\") "
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.820201 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host" (OuterVolumeSpecName: "host") pod "38dfea75-55ee-4dd7-abfe-f28555b9cfd3" (UID: "38dfea75-55ee-4dd7-abfe-f28555b9cfd3"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.827125 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r" (OuterVolumeSpecName: "kube-api-access-vkp6r") pod "38dfea75-55ee-4dd7-abfe-f28555b9cfd3" (UID: "38dfea75-55ee-4dd7-abfe-f28555b9cfd3"). InnerVolumeSpecName "kube-api-access-vkp6r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.855035 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-txqzk"]
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.863199 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-txqzk"]
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.921267 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkp6r\" (UniqueName: \"kubernetes.io/projected/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-kube-api-access-vkp6r\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:23 crc kubenswrapper[4902]: I1202 15:49:23.921310 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/38dfea75-55ee-4dd7-abfe-f28555b9cfd3-host\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:24 crc kubenswrapper[4902]: I1202 15:49:24.678822 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80affc45ab767cbeb9320d9cc84e4ff1f72761fa2f8f1a9c6345674fb3ed2741"
Dec 02 15:49:24 crc kubenswrapper[4902]: I1202 15:49:24.678884 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-txqzk"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.101255 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-mrcls"]
Dec 02 15:49:25 crc kubenswrapper[4902]: E1202 15:49:25.101882 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f87847-4702-4965-b314-8daada381315" containerName="extract-content"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.101895 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f87847-4702-4965-b314-8daada381315" containerName="extract-content"
Dec 02 15:49:25 crc kubenswrapper[4902]: E1202 15:49:25.101915 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f87847-4702-4965-b314-8daada381315" containerName="registry-server"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.101921 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f87847-4702-4965-b314-8daada381315" containerName="registry-server"
Dec 02 15:49:25 crc kubenswrapper[4902]: E1202 15:49:25.101947 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38dfea75-55ee-4dd7-abfe-f28555b9cfd3" containerName="container-00"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.101956 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="38dfea75-55ee-4dd7-abfe-f28555b9cfd3" containerName="container-00"
Dec 02 15:49:25 crc kubenswrapper[4902]: E1202 15:49:25.101963 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f87847-4702-4965-b314-8daada381315" containerName="extract-utilities"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.101969 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f87847-4702-4965-b314-8daada381315" containerName="extract-utilities"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.102154 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="60f87847-4702-4965-b314-8daada381315" containerName="registry-server"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.102171 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="38dfea75-55ee-4dd7-abfe-f28555b9cfd3" containerName="container-00"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.102830 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.115896 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38dfea75-55ee-4dd7-abfe-f28555b9cfd3" path="/var/lib/kubelet/pods/38dfea75-55ee-4dd7-abfe-f28555b9cfd3/volumes"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.245279 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sls9c\" (UniqueName: \"kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.245329 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.347058 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sls9c\" (UniqueName: \"kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.347119 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.347267 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.362511 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sls9c\" (UniqueName: \"kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c\") pod \"crc-debug-mrcls\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") " pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.418665 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.689315 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-mrcls" event={"ID":"08598661-a5e2-4d6e-a378-d97326481d68","Type":"ContainerStarted","Data":"4ef880a474a9bdf78e7fc55d0b275f5e571d164d7955b9fc2a9c0e6f0bc33588"}
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.689580 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-mrcls" event={"ID":"08598661-a5e2-4d6e-a378-d97326481d68","Type":"ContainerStarted","Data":"3f2d4c2ae5b2d1d3a12a56f07f65117cf545f4d2b84f876e79b3eb097ebdf190"}
Dec 02 15:49:25 crc kubenswrapper[4902]: I1202 15:49:25.710439 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9kwz5/crc-debug-mrcls" podStartSLOduration=0.710424785 podStartE2EDuration="710.424785ms" podCreationTimestamp="2025-12-02 15:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 15:49:25.705771613 +0000 UTC m=+5596.897080322" watchObservedRunningTime="2025-12-02 15:49:25.710424785 +0000 UTC m=+5596.901733494"
Dec 02 15:49:26 crc kubenswrapper[4902]: I1202 15:49:26.697227 4902 generic.go:334] "Generic (PLEG): container finished" podID="08598661-a5e2-4d6e-a378-d97326481d68" containerID="4ef880a474a9bdf78e7fc55d0b275f5e571d164d7955b9fc2a9c0e6f0bc33588" exitCode=0
Dec 02 15:49:26 crc kubenswrapper[4902]: I1202 15:49:26.697277 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-mrcls" event={"ID":"08598661-a5e2-4d6e-a378-d97326481d68","Type":"ContainerDied","Data":"4ef880a474a9bdf78e7fc55d0b275f5e571d164d7955b9fc2a9c0e6f0bc33588"}
Dec 02 15:49:27 crc kubenswrapper[4902]: I1202 15:49:27.819864 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:27 crc kubenswrapper[4902]: I1202 15:49:27.989059 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host\") pod \"08598661-a5e2-4d6e-a378-d97326481d68\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") "
Dec 02 15:49:27 crc kubenswrapper[4902]: I1202 15:49:27.989400 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sls9c\" (UniqueName: \"kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c\") pod \"08598661-a5e2-4d6e-a378-d97326481d68\" (UID: \"08598661-a5e2-4d6e-a378-d97326481d68\") "
Dec 02 15:49:27 crc kubenswrapper[4902]: I1202 15:49:27.989412 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host" (OuterVolumeSpecName: "host") pod "08598661-a5e2-4d6e-a378-d97326481d68" (UID: "08598661-a5e2-4d6e-a378-d97326481d68"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 15:49:27 crc kubenswrapper[4902]: I1202 15:49:27.990498 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08598661-a5e2-4d6e-a378-d97326481d68-host\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.000762 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c" (OuterVolumeSpecName: "kube-api-access-sls9c") pod "08598661-a5e2-4d6e-a378-d97326481d68" (UID: "08598661-a5e2-4d6e-a378-d97326481d68"). InnerVolumeSpecName "kube-api-access-sls9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.015312 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-mrcls"]
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.023261 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-mrcls"]
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.091813 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sls9c\" (UniqueName: \"kubernetes.io/projected/08598661-a5e2-4d6e-a378-d97326481d68-kube-api-access-sls9c\") on node \"crc\" DevicePath \"\""
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.713678 4902 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f2d4c2ae5b2d1d3a12a56f07f65117cf545f4d2b84f876e79b3eb097ebdf190"
Dec 02 15:49:28 crc kubenswrapper[4902]: I1202 15:49:28.713933 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-mrcls"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.138426 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08598661-a5e2-4d6e-a378-d97326481d68" path="/var/lib/kubelet/pods/08598661-a5e2-4d6e-a378-d97326481d68/volumes"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.387800 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-djxj7"]
Dec 02 15:49:29 crc kubenswrapper[4902]: E1202 15:49:29.388425 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08598661-a5e2-4d6e-a378-d97326481d68" containerName="container-00"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.388440 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="08598661-a5e2-4d6e-a378-d97326481d68" containerName="container-00"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.388647 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="08598661-a5e2-4d6e-a378-d97326481d68" containerName="container-00"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.389294 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.527364 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhvjl\" (UniqueName: \"kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.527469 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/39564a16-d075-44db-ab09-46d2578c2172-host\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.629174 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/39564a16-d075-44db-ab09-46d2578c2172-host\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.629282 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/39564a16-d075-44db-ab09-46d2578c2172-host\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.629422 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhvjl\" (UniqueName: \"kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.649557 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhvjl\" (UniqueName: \"kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl\") pod \"crc-debug-djxj7\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") " pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: I1202 15:49:29.704305 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:29 crc kubenswrapper[4902]: W1202 15:49:29.739808 4902 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39564a16_d075_44db_ab09_46d2578c2172.slice/crio-afe31d2741b41daca5ab44c0f1830caaeff78f331d8cd6f04d12c39e0ee51711 WatchSource:0}: Error finding container afe31d2741b41daca5ab44c0f1830caaeff78f331d8cd6f04d12c39e0ee51711: Status 404 returned error can't find the container with id afe31d2741b41daca5ab44c0f1830caaeff78f331d8cd6f04d12c39e0ee51711
Dec 02 15:49:30 crc kubenswrapper[4902]: I1202 15:49:30.733941 4902 generic.go:334] "Generic (PLEG): container finished" podID="39564a16-d075-44db-ab09-46d2578c2172" containerID="4090fc354517c248b2a9e7291e479bb3c8dcd78ae1106115208214e9f8dc8c80" exitCode=0
Dec 02 15:49:30 crc kubenswrapper[4902]: I1202 15:49:30.734346 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-djxj7" event={"ID":"39564a16-d075-44db-ab09-46d2578c2172","Type":"ContainerDied","Data":"4090fc354517c248b2a9e7291e479bb3c8dcd78ae1106115208214e9f8dc8c80"}
Dec 02 15:49:30 crc kubenswrapper[4902]: I1202 15:49:30.734386 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/crc-debug-djxj7" event={"ID":"39564a16-d075-44db-ab09-46d2578c2172","Type":"ContainerStarted","Data":"afe31d2741b41daca5ab44c0f1830caaeff78f331d8cd6f04d12c39e0ee51711"}
Dec 02 15:49:30 crc kubenswrapper[4902]: I1202 15:49:30.787985 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-djxj7"]
Dec 02 15:49:30 crc kubenswrapper[4902]: I1202 15:49:30.800617 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9kwz5/crc-debug-djxj7"]
Dec 02 15:49:31 crc kubenswrapper[4902]: I1202 15:49:31.886097 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-djxj7"
Dec 02 15:49:31 crc kubenswrapper[4902]: I1202 15:49:31.985573 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhvjl\" (UniqueName: \"kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl\") pod \"39564a16-d075-44db-ab09-46d2578c2172\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") "
Dec 02 15:49:31 crc kubenswrapper[4902]: I1202 15:49:31.985664 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/39564a16-d075-44db-ab09-46d2578c2172-host\") pod \"39564a16-d075-44db-ab09-46d2578c2172\" (UID: \"39564a16-d075-44db-ab09-46d2578c2172\") "
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 15:49:32 crc kubenswrapper[4902]: I1202 15:49:32.086790 4902 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/39564a16-d075-44db-ab09-46d2578c2172-host\") on node \"crc\" DevicePath \"\"" Dec 02 15:49:32 crc kubenswrapper[4902]: I1202 15:49:32.755846 4902 scope.go:117] "RemoveContainer" containerID="4090fc354517c248b2a9e7291e479bb3c8dcd78ae1106115208214e9f8dc8c80" Dec 02 15:49:32 crc kubenswrapper[4902]: I1202 15:49:32.756257 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/crc-debug-djxj7" Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.736356 4902 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-k4csg container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.736406 4902 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-k4csg container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.736493 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" podUID="f0fd5896-8b33-4fb3-a56c-1e7741ca29df" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.736428 4902 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-k4csg" podUID="f0fd5896-8b33-4fb3-a56c-1e7741ca29df" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.799082 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl" (OuterVolumeSpecName: "kube-api-access-nhvjl") pod "39564a16-d075-44db-ab09-46d2578c2172" (UID: "39564a16-d075-44db-ab09-46d2578c2172"). InnerVolumeSpecName "kube-api-access-nhvjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:49:33 crc kubenswrapper[4902]: I1202 15:49:33.826211 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhvjl\" (UniqueName: \"kubernetes.io/projected/39564a16-d075-44db-ab09-46d2578c2172-kube-api-access-nhvjl\") on node \"crc\" DevicePath \"\"" Dec 02 15:49:35 crc kubenswrapper[4902]: I1202 15:49:35.117734 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39564a16-d075-44db-ab09-46d2578c2172" path="/var/lib/kubelet/pods/39564a16-d075-44db-ab09-46d2578c2172/volumes" Dec 02 15:49:36 crc kubenswrapper[4902]: I1202 15:49:36.107185 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:49:36 crc kubenswrapper[4902]: I1202 15:49:36.800723 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c"} Dec 02 15:50:07 crc kubenswrapper[4902]: I1202 15:50:07.823327 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54c8dd8948-wqzzm_eb085e9d-79c7-495c-b0b3-7fbe62e96cf7/barbican-api/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.033457 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54c8dd8948-wqzzm_eb085e9d-79c7-495c-b0b3-7fbe62e96cf7/barbican-api-log/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.055876 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7cb4b5cd86-6nl4d_1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64/barbican-keystone-listener/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.187251 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7cb4b5cd86-6nl4d_1efd9a7b-fca6-4cc2-a3fd-5b5db65dba64/barbican-keystone-listener-log/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.268090 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d54dc89f-fxxfb_bea5327c-b120-4e9b-876c-94e46621dcad/barbican-worker/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.288320 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d54dc89f-fxxfb_bea5327c-b120-4e9b-876c-94e46621dcad/barbican-worker-log/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.509619 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-zl82b_0d54fbdd-0a68-449d-a2ef-1a4de4c25b02/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.641245 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/ceilometer-central-agent/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.706301 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/ceilometer-notification-agent/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.723854 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/proxy-httpd/0.log" Dec 02 15:50:08 crc kubenswrapper[4902]: I1202 15:50:08.797156 4902 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_ceilometer-0_966e0415-09e4-4b17-9806-7d6e570ed19a/sg-core/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.009930 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e646a9bd-99e5-41c1-8187-076691cad16e/cinder-api-log/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.049012 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e646a9bd-99e5-41c1-8187-076691cad16e/cinder-api/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.122583 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b464cf62-dd2e-4885-9aeb-98c7da8d9e37/cinder-scheduler/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.252230 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b464cf62-dd2e-4885-9aeb-98c7da8d9e37/probe/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.357399 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-wh6n2_f5c865c3-fd5d-4bc9-bf96-d1f57ff89203/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.473973 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-lbdz5_c491ad4c-9428-41e3-8c4d-bcd59ff50ca3/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.570432 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/init/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.727108 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/init/0.log" Dec 02 15:50:09 crc kubenswrapper[4902]: I1202 15:50:09.826016 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-84qbs_a97892a2-52af-4c1b-9db8-be3b7522774d/dnsmasq-dns/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.026279 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-mj8dp_359a7d26-5917-4e98-be9e-55f4702c2ac7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.152842 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3d0b949e-6c1b-4c5c-955e-53e6ab278555/glance-log/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.216298 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3d0b949e-6c1b-4c5c-955e-53e6ab278555/glance-httpd/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.342267 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e/glance-log/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.385234 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9c1d4ba1-ad0d-44c5-b381-515d5a4aa93e/glance-httpd/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.537055 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-54c4bbdfbb-v8pjf_462ead25-02ec-4791-b927-56cf3f26ca39/horizon/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.748481 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-4dxvp_53ecd2a3-800b-4718-bf18-8b77eb9bbbe8/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:10 crc kubenswrapper[4902]: I1202 15:50:10.958499 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s9g5z_0cb0ec2d-c764-45bf-a7e7-02d3f75d2628/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:11 crc kubenswrapper[4902]: I1202 15:50:11.248912 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-54c4bbdfbb-v8pjf_462ead25-02ec-4791-b927-56cf3f26ca39/horizon-log/0.log" Dec 02 15:50:11 crc kubenswrapper[4902]: I1202 15:50:11.345726 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29411461-gmgwz_f2adf6c8-4207-43b6-9149-76b5e8e13c6a/keystone-cron/0.log" Dec 02 15:50:11 crc kubenswrapper[4902]: I1202 15:50:11.521424 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-867694b54b-2t5p7_1a591864-bb12-4847-ba0a-567637fbdaa9/keystone-api/0.log" Dec 02 15:50:11 crc kubenswrapper[4902]: I1202 15:50:11.551940 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_8588d8af-b946-4d96-bd28-472301c12a7b/kube-state-metrics/0.log" Dec 02 15:50:11 crc kubenswrapper[4902]: I1202 15:50:11.728316 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bbwkl_197fff36-7bd5-46a0-a0e1-5b986b4cfc61/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:12 crc kubenswrapper[4902]: I1202 15:50:12.133937 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xjz6v_6b639ec2-25b0-47b7-8e8e-4ff7fa466e47/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:12 crc kubenswrapper[4902]: I1202 15:50:12.201549 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7bffdc6c65-srhkc_04c855a2-1a97-4b81-83c2-d3b51678293d/neutron-httpd/0.log" Dec 02 15:50:12 crc kubenswrapper[4902]: I1202 15:50:12.283699 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7bffdc6c65-srhkc_04c855a2-1a97-4b81-83c2-d3b51678293d/neutron-api/0.log" Dec 02 15:50:12 crc kubenswrapper[4902]: I1202 15:50:12.949823 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c866917d-0d79-4d60-8d9b-066986964749/nova-cell0-conductor-conductor/0.log" Dec 02 15:50:13 crc kubenswrapper[4902]: I1202 15:50:13.171824 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9eccfb03-21f3-4259-85bc-625274a3b3a8/nova-cell1-conductor-conductor/0.log" Dec 02 15:50:13 crc kubenswrapper[4902]: I1202 15:50:13.522529 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_c8412330-d9d3-42bd-be8a-212966221fda/nova-api-log/0.log" Dec 02 15:50:13 crc kubenswrapper[4902]: I1202 15:50:13.701069 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_abcdfeb8-008e-4d99-8860-179b489f7783/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 15:50:13 crc kubenswrapper[4902]: I1202 15:50:13.976249 4902 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-vmxms_61892f44-715d-453e-83fd-b62cd886d24e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.045012 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50eb495f-ceaa-4583-b3d6-42ba67a92160/nova-metadata-log/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.113236 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_c8412330-d9d3-42bd-be8a-212966221fda/nova-api-api/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.528868 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/mysql-bootstrap/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.688665 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_10a0095a-bf9d-45ec-9b8f-1b1543ed641c/nova-scheduler-scheduler/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.717549 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/mysql-bootstrap/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.761341 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3525dcb7-da07-49a6-8786-5e046303b028/galera/0.log" Dec 02 15:50:14 crc kubenswrapper[4902]: I1202 15:50:14.995902 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/mysql-bootstrap/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.115506 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/mysql-bootstrap/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.199881 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ff122a60-3f1a-40cb-b1e6-e871037f63c6/galera/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.340515 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1ac7fd15-4a88-4085-ae7c-8f646c29943f/openstackclient/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.463852 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-67clb_51ddf240-5cda-4f89-831f-4a20ce9997ed/ovn-controller/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.703633 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-68q42_a39354f8-da6e-43a2-9a3d-49e42af19590/openstack-network-exporter/0.log" Dec 02 15:50:15 crc kubenswrapper[4902]: I1202 15:50:15.863856 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server-init/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.087471 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.093687 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovs-vswitchd/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.126281 4902 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dkmxc_77a6d129-70f2-4c3c-b394-1ed4cfaf5104/ovsdb-server-init/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.357156 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_50eb495f-ceaa-4583-b3d6-42ba67a92160/nova-metadata-metadata/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.358577 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qfls9_af830218-8303-46c8-a31a-a33fa89d0034/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.535090 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_412330a8-4fac-4700-a5f8-5054af56f44b/openstack-network-exporter/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.583808 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_412330a8-4fac-4700-a5f8-5054af56f44b/ovn-northd/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.601919 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79db3800-d728-4ace-a34f-37d44c9c4892/openstack-network-exporter/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.785848 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_79db3800-d728-4ace-a34f-37d44c9c4892/ovsdbserver-nb/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.901465 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2eb37d9d-d6ca-4b36-a153-47d2b417e26b/ovsdbserver-sb/0.log" Dec 02 15:50:16 crc kubenswrapper[4902]: I1202 15:50:16.922099 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2eb37d9d-d6ca-4b36-a153-47d2b417e26b/openstack-network-exporter/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.338591 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-786fb786b8-w8tbv_6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3/placement-api/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.380394 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-786fb786b8-w8tbv_6fd1d3bc-6072-40b8-af0a-7fdfc09cbab3/placement-log/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.459534 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/init-config-reloader/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.655272 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/init-config-reloader/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.690239 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/prometheus/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.695490 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/config-reloader/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 15:50:17.705370 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e67ef796-c39e-45e9-bbcf-bfb0fd77dff4/thanos-sidecar/0.log" Dec 02 15:50:17 crc kubenswrapper[4902]: I1202 
15:50:17.967874 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/setup-container/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.151436 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/rabbitmq/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.175255 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1640e5ea-d80e-4302-9690-5ca9efaa9879/setup-container/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.235599 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/setup-container/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.457619 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/setup-container/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.544831 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6e03bb87-25fa-48ee-8e1a-20309d4d3a4b/rabbitmq/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.615178 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-q7kmf_0327d28d-2fe0-4940-b8b7-5a805b1d89bc/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.743903 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-v95mc_4c0e3211-2564-459f-b072-c6d07ac1da5c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.829460 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vkmsf_b851cac6-d9e2-4a83-92d8-dbd09ee7e38d/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:18 crc kubenswrapper[4902]: I1202 15:50:18.994998 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4zh2f_2c20b53d-a975-44ae-8ec3-126956995caf/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.123255 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-jgmdd_77add5e2-5a0c-4a99-a46e-80645017ee83/ssh-known-hosts-edpm-deployment/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.401919 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65c8455667-7wzb6_8804e694-63ac-4278-8672-56c862db1007/proxy-server/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.466056 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65c8455667-7wzb6_8804e694-63ac-4278-8672-56c862db1007/proxy-httpd/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.526164 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-4x7nc_3f809093-c39f-40fe-a785-69a1edd2bdf9/swift-ring-rebalance/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.675441 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-auditor/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.705223 
4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-reaper/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.815557 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-replicator/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.945545 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-auditor/0.log" Dec 02 15:50:19 crc kubenswrapper[4902]: I1202 15:50:19.949890 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/account-server/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.006374 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-replicator/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.045709 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-server/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.179595 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/container-updater/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.237002 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-auditor/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.243689 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-expirer/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.276962 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-replicator/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.364011 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-server/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.445072 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/rsync/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.450925 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/object-updater/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.510680 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_0a523405-44a7-49cc-ae19-25ebbdbc8d73/swift-recon-cron/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.875068 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vvj7w_5bbd6a08-3913-4037-81e5-f7fd18479977/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:20 crc kubenswrapper[4902]: I1202 15:50:20.990179 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3469f18f-c530-4a00-91ba-95720c45b4c2/tempest-tests-tempest-tests-runner/0.log" Dec 02 15:50:21 crc kubenswrapper[4902]: I1202 15:50:21.121967 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_ddbe7574-fbe4-4d9a-a50d-d3f1986e0b4d/test-operator-logs-container/0.log" Dec 02 15:50:21 crc kubenswrapper[4902]: I1202 15:50:21.219442 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-9vzf4_10beb027-7c11-43eb-8d82-782f41f49b1b/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 15:50:22 crc kubenswrapper[4902]: I1202 15:50:22.039167 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_644d8761-1b3a-4a6f-922f-596ae390bacf/watcher-applier/0.log" Dec 02 15:50:22 crc kubenswrapper[4902]: I1202 15:50:22.302305 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_479a72ce-7b6b-4da9-8315-236e285a6680/watcher-api-log/0.log" Dec 02 15:50:23 crc kubenswrapper[4902]: I1202 15:50:23.022652 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_d5838a49-52eb-4485-8eda-14772a0f60bd/watcher-decision-engine/0.log" Dec 02 15:50:24 crc kubenswrapper[4902]: I1202 15:50:24.600976 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_954f9858-8849-4b60-a1dd-1fddd9aaf65e/memcached/0.log" Dec 02 15:50:25 crc kubenswrapper[4902]: I1202 15:50:25.203160 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_479a72ce-7b6b-4da9-8315-236e285a6680/watcher-api/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.461729 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.685940 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.728420 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.761991 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.918866 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/extract/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.934739 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/util/0.log" Dec 02 15:50:48 crc kubenswrapper[4902]: I1202 15:50:48.958709 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_09f6aabc490a484675d73a06c24b9a231a62e55f3a8db45f214d1799b8qkhpz_b5a8d6b1-20ad-4539-8696-c8b30997be69/pull/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.098080 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-jgk7w_ebf2d80f-02cb-4f53-a5b1-67280d3cd74b/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.177275 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-jgk7w_ebf2d80f-02cb-4f53-a5b1-67280d3cd74b/manager/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.229049 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-j55rx_605097d2-e1a6-481d-91e2-fd3b638ad7b1/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.324943 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-j55rx_605097d2-e1a6-481d-91e2-fd3b638ad7b1/manager/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.411448 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-v8th7_eb55d3d7-65ac-4f76-9cb1-210cff99a5bd/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.443971 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-v8th7_eb55d3d7-65ac-4f76-9cb1-210cff99a5bd/manager/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.623762 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5w9nk_9f4cfa25-c827-4b4c-ab57-8dc0221d30b3/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.666044 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5w9nk_9f4cfa25-c827-4b4c-ab57-8dc0221d30b3/manager/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.804511 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-9nw45_4ec2dc11-e22a-40c1-926b-d987e05b8d17/manager/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.809336 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-9nw45_4ec2dc11-e22a-40c1-926b-d987e05b8d17/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.890761 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q688c_3c045445-0a69-416e-a9c6-868843e4c3e9/kube-rbac-proxy/0.log" Dec 02 15:50:49 crc kubenswrapper[4902]: I1202 15:50:49.997794 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q688c_3c045445-0a69-416e-a9c6-868843e4c3e9/manager/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.107643 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bwpd5_9c65e1ba-dd1f-4d63-ae34-c18525c76bcf/kube-rbac-proxy/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.244289 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bwpd5_9c65e1ba-dd1f-4d63-ae34-c18525c76bcf/manager/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.277719 4902 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-wdt4v_16c02339-b964-4c10-8beb-32402af37c34/kube-rbac-proxy/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.351649 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-wdt4v_16c02339-b964-4c10-8beb-32402af37c34/manager/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.457279 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8bnj2_4d401ff5-aff3-4d1c-9fae-d56e1fea07f6/kube-rbac-proxy/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.512585 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8bnj2_4d401ff5-aff3-4d1c-9fae-d56e1fea07f6/manager/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.822921 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-xvrql_fd510a47-813b-4ec1-953b-cfefa2fb890f/kube-rbac-proxy/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.934341 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-xvrql_fd510a47-813b-4ec1-953b-cfefa2fb890f/manager/0.log" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.989948 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:50:50 crc kubenswrapper[4902]: E1202 15:50:50.990432 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39564a16-d075-44db-ab09-46d2578c2172" containerName="container-00" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.990455 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="39564a16-d075-44db-ab09-46d2578c2172" containerName="container-00" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.990797 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="39564a16-d075-44db-ab09-46d2578c2172" containerName="container-00" Dec 02 15:50:50 crc kubenswrapper[4902]: I1202 15:50:50.992655 4902 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.003798 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.076746 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.076835 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxx2g\" (UniqueName: \"kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.076874 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.178812 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.178920 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxx2g\" (UniqueName: \"kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.178964 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.179485 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.179752 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.205365 4902 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dxx2g\" (UniqueName: \"kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g\") pod \"community-operators-59lqh\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.205993 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-gnl4x_0d72b6fb-7ab2-4a48-bdca-17f2794daf3e/kube-rbac-proxy/0.log" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.207651 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-gnl4x_0d72b6fb-7ab2-4a48-bdca-17f2794daf3e/manager/0.log" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.324495 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.470842 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-qh5qm_57034fef-009f-4593-92cb-67cdde94f9e0/kube-rbac-proxy/0.log" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.837273 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-qh5qm_57034fef-009f-4593-92cb-67cdde94f9e0/manager/0.log" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.909472 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-nhklg_5603e150-66d0-4016-a762-fd636f211c11/kube-rbac-proxy/0.log" Dec 02 15:50:51 crc kubenswrapper[4902]: I1202 15:50:51.973649 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.032282 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-nhklg_5603e150-66d0-4016-a762-fd636f211c11/manager/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.183966 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-87z5d_a056d211-44a9-4585-bbf0-fc0413a57099/kube-rbac-proxy/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.217886 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-87z5d_a056d211-44a9-4585-bbf0-fc0413a57099/manager/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.280257 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r_05625664-e0a8-4c8c-904e-69e4b3b7df9b/kube-rbac-proxy/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.360091 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4xgl9r_05625664-e0a8-4c8c-904e-69e4b3b7df9b/manager/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.625802 4902 generic.go:334] "Generic (PLEG): container finished" podID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerID="d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb" exitCode=0 Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 
15:50:52.626125 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerDied","Data":"d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb"} Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.626152 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerStarted","Data":"c153644d9061d46bab2402dfff49700ce14d0226ad2e7d7c09a305974864c644"} Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.629037 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.703132 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-jw4ms_5a2a1e6b-0712-4b08-8dca-36da17c57d51/registry-server/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.789313 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fd5ddc88-7hv7g_4cdeb940-6336-43a7-bec3-bf831b83bce7/operator/0.log" Dec 02 15:50:52 crc kubenswrapper[4902]: I1202 15:50:52.862502 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pnphg_3838a55d-af10-4de8-ad85-bdbd8e49ed62/kube-rbac-proxy/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.069308 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pnphg_3838a55d-af10-4de8-ad85-bdbd8e49ed62/manager/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.314454 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-ghmml_437c097d-1c14-4668-a3c6-86802ed4a253/manager/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.353182 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-ghmml_437c097d-1c14-4668-a3c6-86802ed4a253/kube-rbac-proxy/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.534114 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-575d4674bc-b4fl5_d319072a-bef8-4511-a876-dc7c6e59817e/manager/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.634371 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-gnd6s_a4650041-03e0-4119-b353-281b9200355c/operator/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.698216 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cdphg_fc37cd76-791d-468a-9470-da5138c96d34/kube-rbac-proxy/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.774739 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cdphg_fc37cd76-791d-468a-9470-da5138c96d34/manager/0.log" Dec 02 15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.825283 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tjm2s_a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c/kube-rbac-proxy/0.log" Dec 02 
15:50:53 crc kubenswrapper[4902]: I1202 15:50:53.978969 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhdgs_dd8c8a81-b5af-4a0c-8640-f4455c09abc1/kube-rbac-proxy/0.log" Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.012997 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-tjm2s_a1a6b2b5-e4bc-4a60-a878-7d60c17f1d1c/manager/0.log" Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.083901 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhdgs_dd8c8a81-b5af-4a0c-8640-f4455c09abc1/manager/0.log" Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.234310 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-64fcb99cdb-nkttd_0cbcce3d-353f-44fc-896e-75c3a8b58c21/kube-rbac-proxy/0.log" Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.598490 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-64fcb99cdb-nkttd_0cbcce3d-353f-44fc-896e-75c3a8b58c21/manager/0.log" Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.650673 4902 generic.go:334] "Generic (PLEG): container finished" podID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerID="93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c" exitCode=0 Dec 02 15:50:54 crc kubenswrapper[4902]: I1202 15:50:54.650718 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerDied","Data":"93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c"} Dec 02 15:50:55 crc kubenswrapper[4902]: I1202 15:50:55.662948 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerStarted","Data":"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb"} Dec 02 15:50:55 crc kubenswrapper[4902]: I1202 15:50:55.684228 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-59lqh" podStartSLOduration=3.253780847 podStartE2EDuration="5.684203731s" podCreationTimestamp="2025-12-02 15:50:50 +0000 UTC" firstStartedPulling="2025-12-02 15:50:52.628461763 +0000 UTC m=+5683.819770472" lastFinishedPulling="2025-12-02 15:50:55.058884647 +0000 UTC m=+5686.250193356" observedRunningTime="2025-12-02 15:50:55.680089164 +0000 UTC m=+5686.871397883" watchObservedRunningTime="2025-12-02 15:50:55.684203731 +0000 UTC m=+5686.875512440" Dec 02 15:51:01 crc kubenswrapper[4902]: I1202 15:51:01.325176 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:01 crc kubenswrapper[4902]: I1202 15:51:01.325585 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:01 crc kubenswrapper[4902]: I1202 15:51:01.378819 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:01 crc kubenswrapper[4902]: I1202 15:51:01.765503 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:02 crc kubenswrapper[4902]: I1202 15:51:02.977294 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:51:03 crc kubenswrapper[4902]: I1202 15:51:03.736892 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-59lqh" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="registry-server" containerID="cri-o://db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb" gracePeriod=2 Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.305791 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.333056 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxx2g\" (UniqueName: \"kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g\") pod \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.333109 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content\") pod \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.333344 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities\") pod \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\" (UID: \"33a6dd30-3ecb-428d-84d6-84bdd4b07206\") " Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.334249 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities" (OuterVolumeSpecName: "utilities") pod "33a6dd30-3ecb-428d-84d6-84bdd4b07206" (UID: "33a6dd30-3ecb-428d-84d6-84bdd4b07206"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.360968 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g" (OuterVolumeSpecName: "kube-api-access-dxx2g") pod "33a6dd30-3ecb-428d-84d6-84bdd4b07206" (UID: "33a6dd30-3ecb-428d-84d6-84bdd4b07206"). InnerVolumeSpecName "kube-api-access-dxx2g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.435391 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.435436 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxx2g\" (UniqueName: \"kubernetes.io/projected/33a6dd30-3ecb-428d-84d6-84bdd4b07206-kube-api-access-dxx2g\") on node \"crc\" DevicePath \"\"" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.441963 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33a6dd30-3ecb-428d-84d6-84bdd4b07206" (UID: "33a6dd30-3ecb-428d-84d6-84bdd4b07206"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.537144 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33a6dd30-3ecb-428d-84d6-84bdd4b07206-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.747876 4902 generic.go:334] "Generic (PLEG): container finished" podID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerID="db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb" exitCode=0 Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.747914 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerDied","Data":"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb"} Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.747918 4902 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-59lqh" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.747938 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59lqh" event={"ID":"33a6dd30-3ecb-428d-84d6-84bdd4b07206","Type":"ContainerDied","Data":"c153644d9061d46bab2402dfff49700ce14d0226ad2e7d7c09a305974864c644"} Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.747952 4902 scope.go:117] "RemoveContainer" containerID="db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.770417 4902 scope.go:117] "RemoveContainer" containerID="93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.783055 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.791962 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-59lqh"] Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.809812 4902 scope.go:117] "RemoveContainer" containerID="d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.849252 4902 scope.go:117] "RemoveContainer" containerID="db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb" Dec 02 15:51:04 crc kubenswrapper[4902]: E1202 15:51:04.851766 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb\": container with ID starting with db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb not found: ID does not exist" containerID="db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.851799 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb"} err="failed to get container status \"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb\": rpc error: code = NotFound desc = could not find container \"db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb\": container with ID starting with db0e40e52a373c49a80b6680f1ad3d628aa89438b6e9e18595f5e3b18e9fe5eb not found: ID does not exist" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.851819 4902 scope.go:117] "RemoveContainer" containerID="93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c" Dec 02 15:51:04 crc kubenswrapper[4902]: E1202 15:51:04.852092 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c\": container with ID starting with 93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c not found: ID does not exist" containerID="93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.852111 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c"} err="failed to get container status \"93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c\": rpc error: code = NotFound desc = could not find 
container \"93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c\": container with ID starting with 93eecc8b5af6a1881fe7f5ed507de9f20db2804336b8c3046b20d0f34d17681c not found: ID does not exist" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.852125 4902 scope.go:117] "RemoveContainer" containerID="d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb" Dec 02 15:51:04 crc kubenswrapper[4902]: E1202 15:51:04.852300 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb\": container with ID starting with d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb not found: ID does not exist" containerID="d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb" Dec 02 15:51:04 crc kubenswrapper[4902]: I1202 15:51:04.852319 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb"} err="failed to get container status \"d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb\": rpc error: code = NotFound desc = could not find container \"d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb\": container with ID starting with d106edc047b0e1a11f34dc6299250cb3644a4b340fc0360269615782041594cb not found: ID does not exist" Dec 02 15:51:05 crc kubenswrapper[4902]: I1202 15:51:05.122528 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" path="/var/lib/kubelet/pods/33a6dd30-3ecb-428d-84d6-84bdd4b07206/volumes" Dec 02 15:51:14 crc kubenswrapper[4902]: I1202 15:51:14.301591 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rxhgz_07de0872-46f2-4a69-af4c-a811e7ee3a8d/control-plane-machine-set-operator/0.log" Dec 02 15:51:14 crc kubenswrapper[4902]: I1202 15:51:14.484688 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mxmxt_93abc37a-cc70-4b86-bd3f-4d3945d029f4/machine-api-operator/0.log" Dec 02 15:51:14 crc kubenswrapper[4902]: I1202 15:51:14.516967 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mxmxt_93abc37a-cc70-4b86-bd3f-4d3945d029f4/kube-rbac-proxy/0.log" Dec 02 15:51:27 crc kubenswrapper[4902]: I1202 15:51:27.036319 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-c8sjz_0c33663a-0d99-4a94-bb43-46f102098870/cert-manager-controller/0.log" Dec 02 15:51:27 crc kubenswrapper[4902]: I1202 15:51:27.194465 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-bj9d5_c4d2337b-c3b1-4759-999d-ab47b754a817/cert-manager-cainjector/0.log" Dec 02 15:51:27 crc kubenswrapper[4902]: I1202 15:51:27.240345 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wvs9t_1b016533-08b9-464a-8956-1c236d28e036/cert-manager-webhook/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.463843 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-7nbg8_6382b3e8-8f0e-421b-9612-0a398fd0f994/nmstate-console-plugin/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.611128 4902 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-c2qjr_4d54d7c2-5d4a-41b5-9949-96b8ab11df5e/nmstate-handler/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.647441 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zvsm4_ad4e56ef-44e2-46c9-a609-808e4c96fa2e/nmstate-metrics/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.664254 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zvsm4_ad4e56ef-44e2-46c9-a609-808e4c96fa2e/kube-rbac-proxy/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.814320 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-hpnm5_8d059e00-4b87-410a-8613-a52e1263dc9f/nmstate-operator/0.log" Dec 02 15:51:40 crc kubenswrapper[4902]: I1202 15:51:40.888137 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-8g56t_b31a81b9-0e67-4858-ae54-304c17fd0495/nmstate-webhook/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.126365 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fvt5m_673f9521-fb68-4ec8-9190-cf0315b14280/kube-rbac-proxy/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.206605 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fvt5m_673f9521-fb68-4ec8-9190-cf0315b14280/controller/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.313209 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.508158 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.508442 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.519854 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.564606 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.723135 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.743707 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.751763 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.791696 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.978176 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-frr-files/0.log" Dec 02 15:51:55 crc kubenswrapper[4902]: I1202 15:51:55.995006 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-reloader/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.004278 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/cp-metrics/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.014031 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/controller/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.181571 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/frr-metrics/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.189874 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/kube-rbac-proxy-frr/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.216844 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/kube-rbac-proxy/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.399513 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/reloader/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.445610 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-p7rgx_e15d73a9-a10f-40b9-8a07-ae14a383d2ba/frr-k8s-webhook-server/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.668379 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-644fb8ffcc-frgm7_b75cd83c-20e7-42ea-a0f5-be6d28430a2e/manager/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.859000 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-56874848dd-bf6qw_25920d0f-ef42-4402-82a1-c643307041f5/webhook-server/0.log" Dec 02 15:51:56 crc kubenswrapper[4902]: I1202 15:51:56.993553 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rk7ql_4650d7ad-6b89-444d-a21d-3fad55e3a7b1/kube-rbac-proxy/0.log" Dec 02 15:51:57 crc kubenswrapper[4902]: I1202 15:51:57.613618 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rk7ql_4650d7ad-6b89-444d-a21d-3fad55e3a7b1/speaker/0.log" Dec 02 15:51:57 crc kubenswrapper[4902]: I1202 15:51:57.871504 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2rnt6_b4b0b031-e720-4ff3-9c48-545f116b5473/frr/0.log" Dec 02 15:52:04 crc kubenswrapper[4902]: I1202 15:52:04.731419 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:52:04 crc kubenswrapper[4902]: I1202 15:52:04.732044 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" 
podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:52:09 crc kubenswrapper[4902]: I1202 15:52:09.967899 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.232866 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.270702 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.313925 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.433810 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/extract/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.475145 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/util/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.488314 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fqrbjl_9abb66fe-7d84-40db-981f-b19ac735c12a/pull/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.629156 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.842200 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.878777 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:52:10 crc kubenswrapper[4902]: I1202 15:52:10.879649 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.043205 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/pull/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.092285 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/extract/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.100511 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210p2x2m_462c708d-094f-4814-a61f-a94b5b493956/util/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.409646 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.574096 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.610629 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.631848 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.729282 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/util/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.759823 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/pull/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.825607 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zczg6_b5fb7258-c7bd-4d20-b7a8-d636f970a8ac/extract/0.log" Dec 02 15:52:11 crc kubenswrapper[4902]: I1202 15:52:11.926648 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.140209 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.165871 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.192614 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.343867 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-utilities/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.404521 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/extract-content/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.598250 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.825918 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.867385 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:52:12 crc kubenswrapper[4902]: I1202 15:52:12.902166 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.085711 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-content/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.123269 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/extract-utilities/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.343285 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2hb48_5a130427-ec6d-4743-97a9-42bb4cb308ca/registry-server/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.347392 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vp2bb_bfa7e05a-f673-4432-a9c5-e33f67589a10/marketplace-operator/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.627473 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.938800 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.957139 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:52:13 crc kubenswrapper[4902]: I1202 15:52:13.997636 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.123041 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wcqbz_22c1a675-a060-4372-ab73-1c25c3f3fa70/registry-server/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.176617 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-utilities/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.191870 4902 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/extract-content/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.392898 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-utilities/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.525616 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wmg9h_d366d244-dcc5-4685-9ee5-73990c8d6cb6/registry-server/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.614148 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-utilities/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.640640 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-content/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.659685 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-content/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.798374 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-content/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.821886 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/extract-utilities/0.log" Dec 02 15:52:14 crc kubenswrapper[4902]: I1202 15:52:14.906427 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q46bc_da31f203-4e88-423f-b7c4-320cbd35f00f/registry-server/0.log" Dec 02 15:52:27 crc kubenswrapper[4902]: I1202 15:52:27.004141 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-sbwhh_cd642e4b-b7fa-42d6-a4ca-629c74ff1f5e/prometheus-operator/0.log" Dec 02 15:52:27 crc kubenswrapper[4902]: I1202 15:52:27.232347 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-698f669544-qdwg6_b411880c-1f14-41da-bbc5-85543ddf20d7/prometheus-operator-admission-webhook/0.log" Dec 02 15:52:27 crc kubenswrapper[4902]: I1202 15:52:27.275764 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-698f669544-chdpg_8b03406b-b481-4193-b543-a1f91deefefd/prometheus-operator-admission-webhook/0.log" Dec 02 15:52:27 crc kubenswrapper[4902]: I1202 15:52:27.455441 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-tghq9_c7fe3eed-6ecf-406a-9552-4f2a601eb860/operator/0.log" Dec 02 15:52:27 crc kubenswrapper[4902]: I1202 15:52:27.505938 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-hwvtn_f7d12c85-7736-436e-a273-03025b1fc05b/perses-operator/0.log" Dec 02 15:52:34 crc kubenswrapper[4902]: I1202 15:52:34.731248 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:52:34 crc kubenswrapper[4902]: I1202 15:52:34.731848 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.731895 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.732398 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.732443 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.733257 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.733325 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c" gracePeriod=600 Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.918421 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c" exitCode=0 Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.918518 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c"} Dec 02 15:53:04 crc kubenswrapper[4902]: I1202 15:53:04.918586 4902 scope.go:117] "RemoveContainer" containerID="397f3d9e57b49c20ab5c25b006e44542e932d36c2fec716458f1966f91033905" Dec 02 15:53:05 crc kubenswrapper[4902]: I1202 15:53:05.929987 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerStarted","Data":"efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7"} Dec 02 15:54:20 crc kubenswrapper[4902]: I1202 15:54:20.825190 4902 generic.go:334] "Generic (PLEG): container finished" podID="910db312-d28e-43b2-b759-88886bd7d7a7" 
containerID="3cc83d93c0d1b184af830e105f1a01111ad3aa89b9057baa63b8515aa64b70b1" exitCode=0 Dec 02 15:54:20 crc kubenswrapper[4902]: I1202 15:54:20.825286 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9kwz5/must-gather-9whft" event={"ID":"910db312-d28e-43b2-b759-88886bd7d7a7","Type":"ContainerDied","Data":"3cc83d93c0d1b184af830e105f1a01111ad3aa89b9057baa63b8515aa64b70b1"} Dec 02 15:54:20 crc kubenswrapper[4902]: I1202 15:54:20.826824 4902 scope.go:117] "RemoveContainer" containerID="3cc83d93c0d1b184af830e105f1a01111ad3aa89b9057baa63b8515aa64b70b1" Dec 02 15:54:21 crc kubenswrapper[4902]: I1202 15:54:21.821954 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9kwz5_must-gather-9whft_910db312-d28e-43b2-b759-88886bd7d7a7/gather/0.log" Dec 02 15:54:34 crc kubenswrapper[4902]: I1202 15:54:34.717453 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9kwz5/must-gather-9whft"] Dec 02 15:54:34 crc kubenswrapper[4902]: I1202 15:54:34.718298 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9kwz5/must-gather-9whft" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="copy" containerID="cri-o://d755351f80049ec6e022c1ec6f603ed1ab557d0f79ddbd3642fb2dbf2512ff9d" gracePeriod=2 Dec 02 15:54:34 crc kubenswrapper[4902]: I1202 15:54:34.730223 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9kwz5/must-gather-9whft"] Dec 02 15:54:34 crc kubenswrapper[4902]: I1202 15:54:34.975843 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9kwz5_must-gather-9whft_910db312-d28e-43b2-b759-88886bd7d7a7/copy/0.log" Dec 02 15:54:34 crc kubenswrapper[4902]: I1202 15:54:34.976802 4902 generic.go:334] "Generic (PLEG): container finished" podID="910db312-d28e-43b2-b759-88886bd7d7a7" containerID="d755351f80049ec6e022c1ec6f603ed1ab557d0f79ddbd3642fb2dbf2512ff9d" exitCode=143 Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.139005 4902 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9kwz5_must-gather-9whft_910db312-d28e-43b2-b759-88886bd7d7a7/copy/0.log" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.139603 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/must-gather-9whft" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.248385 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output\") pod \"910db312-d28e-43b2-b759-88886bd7d7a7\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.248519 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rshrt\" (UniqueName: \"kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt\") pod \"910db312-d28e-43b2-b759-88886bd7d7a7\" (UID: \"910db312-d28e-43b2-b759-88886bd7d7a7\") " Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.260938 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt" (OuterVolumeSpecName: "kube-api-access-rshrt") pod "910db312-d28e-43b2-b759-88886bd7d7a7" (UID: "910db312-d28e-43b2-b759-88886bd7d7a7"). InnerVolumeSpecName "kube-api-access-rshrt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.351535 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rshrt\" (UniqueName: \"kubernetes.io/projected/910db312-d28e-43b2-b759-88886bd7d7a7-kube-api-access-rshrt\") on node \"crc\" DevicePath \"\"" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.461197 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "910db312-d28e-43b2-b759-88886bd7d7a7" (UID: "910db312-d28e-43b2-b759-88886bd7d7a7"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.554825 4902 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/910db312-d28e-43b2-b759-88886bd7d7a7-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.815445 4902 scope.go:117] "RemoveContainer" containerID="3cc83d93c0d1b184af830e105f1a01111ad3aa89b9057baa63b8515aa64b70b1" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.870615 4902 scope.go:117] "RemoveContainer" containerID="d755351f80049ec6e022c1ec6f603ed1ab557d0f79ddbd3642fb2dbf2512ff9d" Dec 02 15:54:35 crc kubenswrapper[4902]: I1202 15:54:35.984031 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9kwz5/must-gather-9whft" Dec 02 15:54:37 crc kubenswrapper[4902]: I1202 15:54:37.119308 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" path="/var/lib/kubelet/pods/910db312-d28e-43b2-b759-88886bd7d7a7/volumes" Dec 02 15:55:34 crc kubenswrapper[4902]: I1202 15:55:34.731635 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 15:55:34 crc kubenswrapper[4902]: I1202 15:55:34.732211 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 15:55:35 crc kubenswrapper[4902]: I1202 15:55:35.930931 4902 scope.go:117] "RemoveContainer" containerID="d93f1bbe1a15cd44ac9795e421e0496e16967903569ec0f46bd0a1dcfbe2cfcc" Dec 02 15:55:35 crc kubenswrapper[4902]: I1202 15:55:35.953716 4902 scope.go:117] "RemoveContainer" containerID="6ea99b5a2ce5431dc8cd2d36fbec85e8d7b4e58bec8026e9cee3d3d936bd3b3c" Dec 02 15:55:36 crc kubenswrapper[4902]: I1202 15:55:36.017507 4902 scope.go:117] "RemoveContainer" containerID="28a5ab8d8f5768938b6e939bedb01456f1d0be7f6953e97bfa8e797c8d2bdfda" Dec 02 15:55:36 crc kubenswrapper[4902]: I1202 15:55:36.075773 4902 scope.go:117] "RemoveContainer" containerID="4ef880a474a9bdf78e7fc55d0b275f5e571d164d7955b9fc2a9c0e6f0bc33588" Dec 02 15:55:36 crc kubenswrapper[4902]: I1202 15:55:36.121391 4902 scope.go:117] "RemoveContainer" containerID="36c85a376be5d74bc1482d87a4b059939d5f86f6d035b88cb306b14038bc3e02" Dec 02 15:56:04 crc kubenswrapper[4902]: 
Dec 02 15:56:04 crc kubenswrapper[4902]: I1202 15:56:04.732088 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 15:56:34 crc kubenswrapper[4902]: I1202 15:56:34.732315 4902 patch_prober.go:28] interesting pod/machine-config-daemon-v8znh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 15:56:34 crc kubenswrapper[4902]: I1202 15:56:34.732972 4902 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 15:56:34 crc kubenswrapper[4902]: I1202 15:56:34.733031 4902 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-v8znh"
Dec 02 15:56:34 crc kubenswrapper[4902]: I1202 15:56:34.734078 4902 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7"} pod="openshift-machine-config-operator/machine-config-daemon-v8znh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 15:56:34 crc kubenswrapper[4902]: I1202 15:56:34.734142 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" containerName="machine-config-daemon" containerID="cri-o://efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" gracePeriod=600
Dec 02 15:56:34 crc kubenswrapper[4902]: E1202 15:56:34.862146 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:56:35 crc kubenswrapper[4902]: I1202 15:56:35.276735 4902 generic.go:334] "Generic (PLEG): container finished" podID="c78ba9cf-533f-4683-8531-045256a5d819" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" exitCode=0
Dec 02 15:56:35 crc kubenswrapper[4902]: I1202 15:56:35.276794 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" event={"ID":"c78ba9cf-533f-4683-8531-045256a5d819","Type":"ContainerDied","Data":"efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7"}
Dec 02 15:56:35 crc kubenswrapper[4902]: I1202 15:56:35.276845 4902 scope.go:117] "RemoveContainer" containerID="d4b452993d28c19f8db0d9055f4cfbc1fd8767ae2c03f2cd2b0edb49844d2e3c"
Dec 02 15:56:35 crc kubenswrapper[4902]: I1202 15:56:35.278681 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7"
Dec 02 15:56:35 crc kubenswrapper[4902]: E1202 15:56:35.279964 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.121836 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7"
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.123126 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968212 4902 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"]
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.968764 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="registry-server"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968793 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="registry-server"
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.968808 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="extract-content"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968817 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="extract-content"
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.968830 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="gather"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968839 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="gather"
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.968861 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="extract-utilities"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968868 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="extract-utilities"
Dec 02 15:56:49 crc kubenswrapper[4902]: E1202 15:56:49.968893 4902 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="copy"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.968901 4902 state_mem.go:107] "Deleted CPUSet assignment" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="copy"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.969151 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="copy"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.969171 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a6dd30-3ecb-428d-84d6-84bdd4b07206" containerName="registry-server"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.969196 4902 memory_manager.go:354] "RemoveStaleState removing state" podUID="910db312-d28e-43b2-b759-88886bd7d7a7" containerName="gather"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.971180 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:49 crc kubenswrapper[4902]: I1202 15:56:49.999021 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"]
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.005655 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.005742 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.007354 4902 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cs64\" (UniqueName: \"kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.109265 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cs64\" (UniqueName: \"kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.109422 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.109480 4902 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n"
pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.110124 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.110766 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.131951 4902 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cs64\" (UniqueName: \"kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64\") pod \"redhat-operators-ksx8n\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.302719 4902 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:56:50 crc kubenswrapper[4902]: I1202 15:56:50.758171 4902 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"] Dec 02 15:56:51 crc kubenswrapper[4902]: I1202 15:56:51.469286 4902 generic.go:334] "Generic (PLEG): container finished" podID="25a72687-0ff0-4497-8534-ed9a6edef635" containerID="0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2" exitCode=0 Dec 02 15:56:51 crc kubenswrapper[4902]: I1202 15:56:51.469351 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerDied","Data":"0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2"} Dec 02 15:56:51 crc kubenswrapper[4902]: I1202 15:56:51.469654 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerStarted","Data":"4d0b58ce037300190e9be1f83545079605c735caa42a1a626902163ed05ae6ea"} Dec 02 15:56:52 crc kubenswrapper[4902]: I1202 15:56:52.481115 4902 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 15:56:54 crc kubenswrapper[4902]: I1202 15:56:54.499794 4902 generic.go:334] "Generic (PLEG): container finished" podID="25a72687-0ff0-4497-8534-ed9a6edef635" containerID="5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736" exitCode=0 Dec 02 15:56:54 crc kubenswrapper[4902]: I1202 15:56:54.499917 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerDied","Data":"5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736"} Dec 02 15:56:56 crc kubenswrapper[4902]: I1202 15:56:56.518834 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerStarted","Data":"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8"} Dec 02 15:56:56 crc kubenswrapper[4902]: 
I1202 15:56:56.550430 4902 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ksx8n" podStartSLOduration=4.407875778 podStartE2EDuration="7.550402167s" podCreationTimestamp="2025-12-02 15:56:49 +0000 UTC" firstStartedPulling="2025-12-02 15:56:52.480863741 +0000 UTC m=+6043.672172450" lastFinishedPulling="2025-12-02 15:56:55.62339013 +0000 UTC m=+6046.814698839" observedRunningTime="2025-12-02 15:56:56.545120587 +0000 UTC m=+6047.736429316" watchObservedRunningTime="2025-12-02 15:56:56.550402167 +0000 UTC m=+6047.741710876" Dec 02 15:57:00 crc kubenswrapper[4902]: I1202 15:57:00.107093 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:57:00 crc kubenswrapper[4902]: E1202 15:57:00.108888 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:57:00 crc kubenswrapper[4902]: I1202 15:57:00.303049 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:00 crc kubenswrapper[4902]: I1202 15:57:00.303500 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:01 crc kubenswrapper[4902]: I1202 15:57:01.366721 4902 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ksx8n" podUID="25a72687-0ff0-4497-8534-ed9a6edef635" containerName="registry-server" probeResult="failure" output=< Dec 02 15:57:01 crc kubenswrapper[4902]: timeout: failed to connect service ":50051" within 1s Dec 02 15:57:01 crc kubenswrapper[4902]: > Dec 02 15:57:10 crc kubenswrapper[4902]: I1202 15:57:10.366079 4902 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:10 crc kubenswrapper[4902]: I1202 15:57:10.466301 4902 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:10 crc kubenswrapper[4902]: I1202 15:57:10.611983 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"] Dec 02 15:57:11 crc kubenswrapper[4902]: I1202 15:57:11.106707 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:57:11 crc kubenswrapper[4902]: E1202 15:57:11.106981 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:57:11 crc kubenswrapper[4902]: I1202 15:57:11.691956 4902 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ksx8n" podUID="25a72687-0ff0-4497-8534-ed9a6edef635" containerName="registry-server" 
containerID="cri-o://3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8" gracePeriod=2 Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.207098 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.355000 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cs64\" (UniqueName: \"kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64\") pod \"25a72687-0ff0-4497-8534-ed9a6edef635\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.355147 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities\") pod \"25a72687-0ff0-4497-8534-ed9a6edef635\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.355306 4902 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content\") pod \"25a72687-0ff0-4497-8534-ed9a6edef635\" (UID: \"25a72687-0ff0-4497-8534-ed9a6edef635\") " Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.357680 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities" (OuterVolumeSpecName: "utilities") pod "25a72687-0ff0-4497-8534-ed9a6edef635" (UID: "25a72687-0ff0-4497-8534-ed9a6edef635"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.369053 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64" (OuterVolumeSpecName: "kube-api-access-2cs64") pod "25a72687-0ff0-4497-8534-ed9a6edef635" (UID: "25a72687-0ff0-4497-8534-ed9a6edef635"). InnerVolumeSpecName "kube-api-access-2cs64". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.457234 4902 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.457270 4902 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cs64\" (UniqueName: \"kubernetes.io/projected/25a72687-0ff0-4497-8534-ed9a6edef635-kube-api-access-2cs64\") on node \"crc\" DevicePath \"\"" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.471967 4902 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25a72687-0ff0-4497-8534-ed9a6edef635" (UID: "25a72687-0ff0-4497-8534-ed9a6edef635"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.559093 4902 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25a72687-0ff0-4497-8534-ed9a6edef635-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.702223 4902 generic.go:334] "Generic (PLEG): container finished" podID="25a72687-0ff0-4497-8534-ed9a6edef635" containerID="3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8" exitCode=0 Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.702269 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerDied","Data":"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8"} Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.702296 4902 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksx8n" event={"ID":"25a72687-0ff0-4497-8534-ed9a6edef635","Type":"ContainerDied","Data":"4d0b58ce037300190e9be1f83545079605c735caa42a1a626902163ed05ae6ea"} Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.702315 4902 scope.go:117] "RemoveContainer" containerID="3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.702321 4902 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksx8n" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.727922 4902 scope.go:117] "RemoveContainer" containerID="5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.745250 4902 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"] Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.755356 4902 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ksx8n"] Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.770457 4902 scope.go:117] "RemoveContainer" containerID="0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.808467 4902 scope.go:117] "RemoveContainer" containerID="3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8" Dec 02 15:57:12 crc kubenswrapper[4902]: E1202 15:57:12.808807 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8\": container with ID starting with 3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8 not found: ID does not exist" containerID="3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.808845 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8"} err="failed to get container status \"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8\": rpc error: code = NotFound desc = could not find container \"3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8\": container with ID starting with 3efd289581ad32cd51e0a500062bce6a2360d71bc482324d2c75591c72b227f8 not found: ID does not exist" Dec 02 15:57:12 crc 
kubenswrapper[4902]: I1202 15:57:12.808865 4902 scope.go:117] "RemoveContainer" containerID="5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736" Dec 02 15:57:12 crc kubenswrapper[4902]: E1202 15:57:12.809069 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736\": container with ID starting with 5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736 not found: ID does not exist" containerID="5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.809090 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736"} err="failed to get container status \"5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736\": rpc error: code = NotFound desc = could not find container \"5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736\": container with ID starting with 5b8c4ac9c6c4c50c9c3926b310ba96cc6e4d104d2445bf7c0a3a7e96efbc5736 not found: ID does not exist" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.809106 4902 scope.go:117] "RemoveContainer" containerID="0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2" Dec 02 15:57:12 crc kubenswrapper[4902]: E1202 15:57:12.809478 4902 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2\": container with ID starting with 0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2 not found: ID does not exist" containerID="0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2" Dec 02 15:57:12 crc kubenswrapper[4902]: I1202 15:57:12.809498 4902 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2"} err="failed to get container status \"0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2\": rpc error: code = NotFound desc = could not find container \"0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2\": container with ID starting with 0e8e9f14715db55f0492f8b1480f0f658bf4e2cbd1efbcfa037cc97d209e01f2 not found: ID does not exist" Dec 02 15:57:13 crc kubenswrapper[4902]: I1202 15:57:13.139433 4902 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25a72687-0ff0-4497-8534-ed9a6edef635" path="/var/lib/kubelet/pods/25a72687-0ff0-4497-8534-ed9a6edef635/volumes" Dec 02 15:57:22 crc kubenswrapper[4902]: I1202 15:57:22.107763 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:57:22 crc kubenswrapper[4902]: E1202 15:57:22.108493 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:57:37 crc kubenswrapper[4902]: I1202 15:57:37.106808 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" 
Dec 02 15:57:37 crc kubenswrapper[4902]: E1202 15:57:37.107647 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:57:50 crc kubenswrapper[4902]: I1202 15:57:50.106949 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:57:50 crc kubenswrapper[4902]: E1202 15:57:50.107944 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:58:03 crc kubenswrapper[4902]: I1202 15:58:03.110670 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:58:03 crc kubenswrapper[4902]: E1202 15:58:03.111878 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:58:18 crc kubenswrapper[4902]: I1202 15:58:18.107080 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:58:18 crc kubenswrapper[4902]: E1202 15:58:18.107803 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:58:32 crc kubenswrapper[4902]: I1202 15:58:32.107088 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:58:32 crc kubenswrapper[4902]: E1202 15:58:32.108147 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:58:47 crc kubenswrapper[4902]: I1202 15:58:47.107707 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:58:47 crc kubenswrapper[4902]: E1202 15:58:47.109107 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" Dec 02 15:59:02 crc kubenswrapper[4902]: I1202 15:59:02.107021 4902 scope.go:117] "RemoveContainer" containerID="efc93f99c0f66a335cfd25d65e87cd52b52bc02b5f1d0c63b0b6650c656468a7" Dec 02 15:59:02 crc kubenswrapper[4902]: E1202 15:59:02.107970 4902 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-v8znh_openshift-machine-config-operator(c78ba9cf-533f-4683-8531-045256a5d819)\"" pod="openshift-machine-config-operator/machine-config-daemon-v8znh" podUID="c78ba9cf-533f-4683-8531-045256a5d819" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113606317024450 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113606320017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113571664016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113571665015467 5ustar corecore